@cloudflare/sandbox 0.7.16 → 0.7.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +2 -2
- package/dist/index.js +652 -208
- package/dist/index.js.map +1 -1
- package/dist/openai/index.d.ts +1 -1
- package/dist/opencode/index.d.ts +8 -1
- package/dist/opencode/index.d.ts.map +1 -1
- package/dist/opencode/index.js +10 -4
- package/dist/opencode/index.js.map +1 -1
- package/dist/{sandbox-DKzgVy6K.d.ts → sandbox-CTUXe5eK.d.ts} +81 -7
- package/dist/sandbox-CTUXe5eK.d.ts.map +1 -0
- package/package.json +1 -1
- package/dist/sandbox-DKzgVy6K.d.ts.map +0 -1
package/dist/index.js
CHANGED
|
@@ -2,6 +2,7 @@ import { _ as filterEnvVars, a as isExecResult, c as parseSSEFrames, d as create
|
|
|
2
2
|
import { t as ErrorCode } from "./errors-CaSfB5Bm.js";
|
|
3
3
|
import { Container, getContainer, switchPort } from "@cloudflare/containers";
|
|
4
4
|
import { AwsClient } from "aws4fetch";
|
|
5
|
+
import path from "node:path/posix";
|
|
5
6
|
|
|
6
7
|
//#region src/errors/classes.ts
|
|
7
8
|
/**
|
|
@@ -742,11 +743,11 @@ var BaseTransport = class {
|
|
|
742
743
|
* This is the primary entry point for making requests. It wraps the
|
|
743
744
|
* transport-specific doFetch() with retry logic for container startup.
|
|
744
745
|
*/
|
|
745
|
-
async fetch(path, options) {
|
|
746
|
+
async fetch(path$1, options) {
|
|
746
747
|
const startTime = Date.now();
|
|
747
748
|
let attempt = 0;
|
|
748
749
|
while (true) {
|
|
749
|
-
const response = await this.doFetch(path, options);
|
|
750
|
+
const response = await this.doFetch(path$1, options);
|
|
750
751
|
if (response.status === 503) {
|
|
751
752
|
const elapsed = Date.now() - startTime;
|
|
752
753
|
const remaining = this.retryTimeoutMs - elapsed;
|
|
@@ -798,13 +799,13 @@ var HttpTransport = class extends BaseTransport {
|
|
|
798
799
|
isConnected() {
|
|
799
800
|
return true;
|
|
800
801
|
}
|
|
801
|
-
async doFetch(path, options) {
|
|
802
|
-
const url = this.buildUrl(path);
|
|
802
|
+
async doFetch(path$1, options) {
|
|
803
|
+
const url = this.buildUrl(path$1);
|
|
803
804
|
if (this.config.stub) return this.config.stub.containerFetch(url, options || {}, this.config.port);
|
|
804
805
|
return globalThis.fetch(url, options);
|
|
805
806
|
}
|
|
806
|
-
async fetchStream(path, body, method = "POST") {
|
|
807
|
-
const url = this.buildUrl(path);
|
|
807
|
+
async fetchStream(path$1, body, method = "POST") {
|
|
808
|
+
const url = this.buildUrl(path$1);
|
|
808
809
|
const options = this.buildStreamOptions(body, method);
|
|
809
810
|
let response;
|
|
810
811
|
if (this.config.stub) response = await this.config.stub.containerFetch(url, options, this.config.port);
|
|
@@ -816,9 +817,9 @@ var HttpTransport = class extends BaseTransport {
|
|
|
816
817
|
if (!response.body) throw new Error("No response body for streaming");
|
|
817
818
|
return response.body;
|
|
818
819
|
}
|
|
819
|
-
buildUrl(path) {
|
|
820
|
-
if (this.config.stub) return `http://localhost:${this.config.port}${path}`;
|
|
821
|
-
return `${this.baseUrl}${path}`;
|
|
820
|
+
buildUrl(path$1) {
|
|
821
|
+
if (this.config.stub) return `http://localhost:${this.config.port}${path$1}`;
|
|
822
|
+
return `${this.baseUrl}${path$1}`;
|
|
822
823
|
}
|
|
823
824
|
buildStreamOptions(body, method) {
|
|
824
825
|
return {
|
|
@@ -877,9 +878,8 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
877
878
|
this.connectPromise = this.doConnect();
|
|
878
879
|
try {
|
|
879
880
|
await this.connectPromise;
|
|
880
|
-
}
|
|
881
|
+
} finally {
|
|
881
882
|
this.connectPromise = null;
|
|
882
|
-
throw error;
|
|
883
883
|
}
|
|
884
884
|
}
|
|
885
885
|
/**
|
|
@@ -892,11 +892,11 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
892
892
|
* Transport-specific fetch implementation
|
|
893
893
|
* Converts WebSocket response to standard Response object.
|
|
894
894
|
*/
|
|
895
|
-
async doFetch(path, options) {
|
|
895
|
+
async doFetch(path$1, options) {
|
|
896
896
|
await this.connect();
|
|
897
897
|
const method = options?.method || "GET";
|
|
898
898
|
const body = this.parseBody(options?.body);
|
|
899
|
-
const result = await this.request(method, path, body);
|
|
899
|
+
const result = await this.request(method, path$1, body);
|
|
900
900
|
return new Response(JSON.stringify(result.body), {
|
|
901
901
|
status: result.status,
|
|
902
902
|
headers: { "Content-Type": "application/json" }
|
|
@@ -905,8 +905,8 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
905
905
|
/**
|
|
906
906
|
* Streaming fetch implementation
|
|
907
907
|
*/
|
|
908
|
-
async fetchStream(path, body, method = "POST") {
|
|
909
|
-
return this.requestStream(method, path, body);
|
|
908
|
+
async fetchStream(path$1, body, method = "POST") {
|
|
909
|
+
return this.requestStream(method, path$1, body);
|
|
910
910
|
}
|
|
911
911
|
/**
|
|
912
912
|
* Parse request body from RequestInit
|
|
@@ -1009,21 +1009,21 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
1009
1009
|
/**
|
|
1010
1010
|
* Send a request and wait for response
|
|
1011
1011
|
*/
|
|
1012
|
-
async request(method, path, body) {
|
|
1012
|
+
async request(method, path$1, body) {
|
|
1013
1013
|
await this.connect();
|
|
1014
1014
|
const id = generateRequestId();
|
|
1015
1015
|
const request = {
|
|
1016
1016
|
type: "request",
|
|
1017
1017
|
id,
|
|
1018
1018
|
method,
|
|
1019
|
-
path,
|
|
1019
|
+
path: path$1,
|
|
1020
1020
|
body
|
|
1021
1021
|
};
|
|
1022
1022
|
return new Promise((resolve, reject) => {
|
|
1023
1023
|
const timeoutMs = this.config.requestTimeoutMs ?? DEFAULT_REQUEST_TIMEOUT_MS;
|
|
1024
1024
|
const timeoutId = setTimeout(() => {
|
|
1025
1025
|
this.pendingRequests.delete(id);
|
|
1026
|
-
reject(/* @__PURE__ */ new Error(`Request timeout after ${timeoutMs}ms: ${method} ${path}`));
|
|
1026
|
+
reject(/* @__PURE__ */ new Error(`Request timeout after ${timeoutMs}ms: ${method} ${path$1}`));
|
|
1027
1027
|
}, timeoutMs);
|
|
1028
1028
|
this.pendingRequests.set(id, {
|
|
1029
1029
|
resolve: (response) => {
|
|
@@ -1065,14 +1065,14 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
1065
1065
|
* long-running streams (e.g. execStream from an agent) stay alive as long
|
|
1066
1066
|
* as data is flowing. The timer resets on every chunk or response message.
|
|
1067
1067
|
*/
|
|
1068
|
-
async requestStream(method, path, body) {
|
|
1068
|
+
async requestStream(method, path$1, body) {
|
|
1069
1069
|
await this.connect();
|
|
1070
1070
|
const id = generateRequestId();
|
|
1071
1071
|
const request = {
|
|
1072
1072
|
type: "request",
|
|
1073
1073
|
id,
|
|
1074
1074
|
method,
|
|
1075
|
-
path,
|
|
1075
|
+
path: path$1,
|
|
1076
1076
|
body
|
|
1077
1077
|
};
|
|
1078
1078
|
const idleTimeoutMs = this.config.streamIdleTimeoutMs ?? DEFAULT_STREAM_IDLE_TIMEOUT_MS;
|
|
@@ -1082,7 +1082,7 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
1082
1082
|
const createIdleTimeout = () => {
|
|
1083
1083
|
return setTimeout(() => {
|
|
1084
1084
|
this.pendingRequests.delete(id);
|
|
1085
|
-
const error = /* @__PURE__ */ new Error(`Stream idle timeout after ${idleTimeoutMs}ms: ${method} ${path}`);
|
|
1085
|
+
const error = /* @__PURE__ */ new Error(`Stream idle timeout after ${idleTimeoutMs}ms: ${method} ${path$1}`);
|
|
1086
1086
|
if (firstMessageReceived) try {
|
|
1087
1087
|
streamController?.error(error);
|
|
1088
1088
|
} catch {}
|
|
@@ -1293,6 +1293,7 @@ var WebSocketTransport = class extends BaseTransport {
|
|
|
1293
1293
|
handleClose(event) {
|
|
1294
1294
|
this.state = "disconnected";
|
|
1295
1295
|
this.ws = null;
|
|
1296
|
+
this.connectPromise = null;
|
|
1296
1297
|
const closeError = /* @__PURE__ */ new Error(`WebSocket closed: ${event.code} ${event.reason || "No reason"}`);
|
|
1297
1298
|
for (const [, pending] of this.pendingRequests) {
|
|
1298
1299
|
if (pending.timeoutId) clearTimeout(pending.timeoutId);
|
|
@@ -1396,8 +1397,8 @@ var BaseHttpClient = class {
|
|
|
1396
1397
|
/**
|
|
1397
1398
|
* Core fetch method - delegates to Transport which handles retry logic
|
|
1398
1399
|
*/
|
|
1399
|
-
async doFetch(path, options) {
|
|
1400
|
-
return this.transport.fetch(path, options);
|
|
1400
|
+
async doFetch(path$1, options) {
|
|
1401
|
+
return this.transport.fetch(path$1, options);
|
|
1401
1402
|
}
|
|
1402
1403
|
/**
|
|
1403
1404
|
* Make a POST request with JSON body
|
|
@@ -1480,14 +1481,14 @@ var BaseHttpClient = class {
|
|
|
1480
1481
|
* @param body - Optional request body (for POST requests)
|
|
1481
1482
|
* @param method - HTTP method (default: POST, use GET for process logs)
|
|
1482
1483
|
*/
|
|
1483
|
-
async doStreamFetch(path, body, method = "POST") {
|
|
1484
|
+
async doStreamFetch(path$1, body, method = "POST") {
|
|
1484
1485
|
if (this.transport.getMode() === "websocket") try {
|
|
1485
|
-
return await this.transport.fetchStream(path, body, method);
|
|
1486
|
+
return await this.transport.fetchStream(path$1, body, method);
|
|
1486
1487
|
} catch (error) {
|
|
1487
|
-
this.logError(`stream ${method} ${path}`, error);
|
|
1488
|
+
this.logError(`stream ${method} ${path$1}`, error);
|
|
1488
1489
|
throw error;
|
|
1489
1490
|
}
|
|
1490
|
-
const response = await this.doFetch(path, {
|
|
1491
|
+
const response = await this.doFetch(path$1, {
|
|
1491
1492
|
method,
|
|
1492
1493
|
headers: { "Content-Type": "application/json" },
|
|
1493
1494
|
body: body && method === "POST" ? JSON.stringify(body) : void 0
|
|
@@ -1531,11 +1532,13 @@ var BackupClient = class extends BaseHttpClient {
|
|
|
1531
1532
|
* @param archivePath - Where the container should write the archive
|
|
1532
1533
|
* @param sessionId - Session context
|
|
1533
1534
|
*/
|
|
1534
|
-
async createArchive(dir, archivePath, sessionId) {
|
|
1535
|
+
async createArchive(dir, archivePath, sessionId, gitignore = false, excludes = []) {
|
|
1535
1536
|
try {
|
|
1536
1537
|
const data = {
|
|
1537
1538
|
dir,
|
|
1538
1539
|
archivePath,
|
|
1540
|
+
gitignore,
|
|
1541
|
+
excludes,
|
|
1539
1542
|
sessionId
|
|
1540
1543
|
};
|
|
1541
1544
|
const response = await this.post("/api/backup/create", data);
|
|
@@ -2003,15 +2006,15 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2003
2006
|
* @param sessionId - The session ID for this operation
|
|
2004
2007
|
* @param options - Optional settings (recursive)
|
|
2005
2008
|
*/
|
|
2006
|
-
async mkdir(path, sessionId, options) {
|
|
2009
|
+
async mkdir(path$1, sessionId, options) {
|
|
2007
2010
|
try {
|
|
2008
2011
|
const data = {
|
|
2009
|
-
path,
|
|
2012
|
+
path: path$1,
|
|
2010
2013
|
sessionId,
|
|
2011
2014
|
recursive: options?.recursive ?? false
|
|
2012
2015
|
};
|
|
2013
2016
|
const response = await this.post("/api/mkdir", data);
|
|
2014
|
-
this.logSuccess("Directory created", `${path} (recursive: ${data.recursive})`);
|
|
2017
|
+
this.logSuccess("Directory created", `${path$1} (recursive: ${data.recursive})`);
|
|
2015
2018
|
return response;
|
|
2016
2019
|
} catch (error) {
|
|
2017
2020
|
this.logError("mkdir", error);
|
|
@@ -2025,16 +2028,16 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2025
2028
|
* @param sessionId - The session ID for this operation
|
|
2026
2029
|
* @param options - Optional settings (encoding)
|
|
2027
2030
|
*/
|
|
2028
|
-
async writeFile(path, content, sessionId, options) {
|
|
2031
|
+
async writeFile(path$1, content, sessionId, options) {
|
|
2029
2032
|
try {
|
|
2030
2033
|
const data = {
|
|
2031
|
-
path,
|
|
2034
|
+
path: path$1,
|
|
2032
2035
|
content,
|
|
2033
2036
|
sessionId,
|
|
2034
2037
|
encoding: options?.encoding
|
|
2035
2038
|
};
|
|
2036
2039
|
const response = await this.post("/api/write", data);
|
|
2037
|
-
this.logSuccess("File written", `${path} (${content.length} chars)`);
|
|
2040
|
+
this.logSuccess("File written", `${path$1} (${content.length} chars)`);
|
|
2038
2041
|
return response;
|
|
2039
2042
|
} catch (error) {
|
|
2040
2043
|
this.logError("writeFile", error);
|
|
@@ -2047,15 +2050,15 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2047
2050
|
* @param sessionId - The session ID for this operation
|
|
2048
2051
|
* @param options - Optional settings (encoding)
|
|
2049
2052
|
*/
|
|
2050
|
-
async readFile(path, sessionId, options) {
|
|
2053
|
+
async readFile(path$1, sessionId, options) {
|
|
2051
2054
|
try {
|
|
2052
2055
|
const data = {
|
|
2053
|
-
path,
|
|
2056
|
+
path: path$1,
|
|
2054
2057
|
sessionId,
|
|
2055
2058
|
encoding: options?.encoding
|
|
2056
2059
|
};
|
|
2057
2060
|
const response = await this.post("/api/read", data);
|
|
2058
|
-
this.logSuccess("File read", `${path} (${response.content.length} chars)`);
|
|
2061
|
+
this.logSuccess("File read", `${path$1} (${response.content.length} chars)`);
|
|
2059
2062
|
return response;
|
|
2060
2063
|
} catch (error) {
|
|
2061
2064
|
this.logError("readFile", error);
|
|
@@ -2068,14 +2071,14 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2068
2071
|
* @param path - File path to stream
|
|
2069
2072
|
* @param sessionId - The session ID for this operation
|
|
2070
2073
|
*/
|
|
2071
|
-
async readFileStream(path, sessionId) {
|
|
2074
|
+
async readFileStream(path$1, sessionId) {
|
|
2072
2075
|
try {
|
|
2073
2076
|
const data = {
|
|
2074
|
-
path,
|
|
2077
|
+
path: path$1,
|
|
2075
2078
|
sessionId
|
|
2076
2079
|
};
|
|
2077
2080
|
const stream = await this.doStreamFetch("/api/read/stream", data);
|
|
2078
|
-
this.logSuccess("File stream started", path);
|
|
2081
|
+
this.logSuccess("File stream started", path$1);
|
|
2079
2082
|
return stream;
|
|
2080
2083
|
} catch (error) {
|
|
2081
2084
|
this.logError("readFileStream", error);
|
|
@@ -2087,14 +2090,14 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2087
2090
|
* @param path - File path to delete
|
|
2088
2091
|
* @param sessionId - The session ID for this operation
|
|
2089
2092
|
*/
|
|
2090
|
-
async deleteFile(path, sessionId) {
|
|
2093
|
+
async deleteFile(path$1, sessionId) {
|
|
2091
2094
|
try {
|
|
2092
2095
|
const data = {
|
|
2093
|
-
path,
|
|
2096
|
+
path: path$1,
|
|
2094
2097
|
sessionId
|
|
2095
2098
|
};
|
|
2096
2099
|
const response = await this.post("/api/delete", data);
|
|
2097
|
-
this.logSuccess("File deleted", path);
|
|
2100
|
+
this.logSuccess("File deleted", path$1);
|
|
2098
2101
|
return response;
|
|
2099
2102
|
} catch (error) {
|
|
2100
2103
|
this.logError("deleteFile", error);
|
|
@@ -2107,15 +2110,15 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2107
2110
|
* @param newPath - New file path
|
|
2108
2111
|
* @param sessionId - The session ID for this operation
|
|
2109
2112
|
*/
|
|
2110
|
-
async renameFile(path, newPath, sessionId) {
|
|
2113
|
+
async renameFile(path$1, newPath, sessionId) {
|
|
2111
2114
|
try {
|
|
2112
2115
|
const data = {
|
|
2113
|
-
oldPath: path,
|
|
2116
|
+
oldPath: path$1,
|
|
2114
2117
|
newPath,
|
|
2115
2118
|
sessionId
|
|
2116
2119
|
};
|
|
2117
2120
|
const response = await this.post("/api/rename", data);
|
|
2118
|
-
this.logSuccess("File renamed", `${path} -> ${newPath}`);
|
|
2121
|
+
this.logSuccess("File renamed", `${path$1} -> ${newPath}`);
|
|
2119
2122
|
return response;
|
|
2120
2123
|
} catch (error) {
|
|
2121
2124
|
this.logError("renameFile", error);
|
|
@@ -2128,15 +2131,15 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2128
2131
|
* @param newPath - Destination file path
|
|
2129
2132
|
* @param sessionId - The session ID for this operation
|
|
2130
2133
|
*/
|
|
2131
|
-
async moveFile(path, newPath, sessionId) {
|
|
2134
|
+
async moveFile(path$1, newPath, sessionId) {
|
|
2132
2135
|
try {
|
|
2133
2136
|
const data = {
|
|
2134
|
-
sourcePath: path,
|
|
2137
|
+
sourcePath: path$1,
|
|
2135
2138
|
destinationPath: newPath,
|
|
2136
2139
|
sessionId
|
|
2137
2140
|
};
|
|
2138
2141
|
const response = await this.post("/api/move", data);
|
|
2139
|
-
this.logSuccess("File moved", `${path} -> ${newPath}`);
|
|
2142
|
+
this.logSuccess("File moved", `${path$1} -> ${newPath}`);
|
|
2140
2143
|
return response;
|
|
2141
2144
|
} catch (error) {
|
|
2142
2145
|
this.logError("moveFile", error);
|
|
@@ -2149,15 +2152,15 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2149
2152
|
* @param sessionId - The session ID for this operation
|
|
2150
2153
|
* @param options - Optional settings (recursive, includeHidden)
|
|
2151
2154
|
*/
|
|
2152
|
-
async listFiles(path, sessionId, options) {
|
|
2155
|
+
async listFiles(path$1, sessionId, options) {
|
|
2153
2156
|
try {
|
|
2154
2157
|
const data = {
|
|
2155
|
-
path,
|
|
2158
|
+
path: path$1,
|
|
2156
2159
|
sessionId,
|
|
2157
2160
|
options: options || {}
|
|
2158
2161
|
};
|
|
2159
2162
|
const response = await this.post("/api/list-files", data);
|
|
2160
|
-
this.logSuccess("Files listed", `${path} (${response.count} files)`);
|
|
2163
|
+
this.logSuccess("Files listed", `${path$1} (${response.count} files)`);
|
|
2161
2164
|
return response;
|
|
2162
2165
|
} catch (error) {
|
|
2163
2166
|
this.logError("listFiles", error);
|
|
@@ -2169,14 +2172,14 @@ var FileClient = class extends BaseHttpClient {
|
|
|
2169
2172
|
* @param path - Path to check
|
|
2170
2173
|
* @param sessionId - The session ID for this operation
|
|
2171
2174
|
*/
|
|
2172
|
-
async exists(path, sessionId) {
|
|
2175
|
+
async exists(path$1, sessionId) {
|
|
2173
2176
|
try {
|
|
2174
2177
|
const data = {
|
|
2175
|
-
path,
|
|
2178
|
+
path: path$1,
|
|
2176
2179
|
sessionId
|
|
2177
2180
|
};
|
|
2178
2181
|
const response = await this.post("/api/exists", data);
|
|
2179
|
-
this.logSuccess("Path existence checked", `${path} (exists: ${response.exists})`);
|
|
2182
|
+
this.logSuccess("Path existence checked", `${path$1} (exists: ${response.exists})`);
|
|
2180
2183
|
return response;
|
|
2181
2184
|
} catch (error) {
|
|
2182
2185
|
this.logError("exists", error);
|
|
@@ -3026,6 +3029,401 @@ var CodeInterpreter = class {
|
|
|
3026
3029
|
}
|
|
3027
3030
|
};
|
|
3028
3031
|
|
|
3032
|
+
//#endregion
|
|
3033
|
+
//#region src/sse-parser.ts
|
|
3034
|
+
/**
|
|
3035
|
+
* Server-Sent Events (SSE) parser for streaming responses
|
|
3036
|
+
* Converts ReadableStream<Uint8Array> to typed AsyncIterable<T>
|
|
3037
|
+
*/
|
|
3038
|
+
/**
|
|
3039
|
+
* Parse a ReadableStream of SSE events into typed AsyncIterable
|
|
3040
|
+
* @param stream - The ReadableStream from fetch response
|
|
3041
|
+
* @param signal - Optional AbortSignal for cancellation
|
|
3042
|
+
*/
|
|
3043
|
+
async function* parseSSEStream(stream, signal) {
|
|
3044
|
+
const reader = stream.getReader();
|
|
3045
|
+
const decoder = new TextDecoder();
|
|
3046
|
+
let buffer = "";
|
|
3047
|
+
let currentEvent = { data: [] };
|
|
3048
|
+
let isAborted = signal?.aborted ?? false;
|
|
3049
|
+
const emitEvent = (data) => {
|
|
3050
|
+
if (data === "[DONE]" || data.trim() === "") return;
|
|
3051
|
+
try {
|
|
3052
|
+
return JSON.parse(data);
|
|
3053
|
+
} catch {
|
|
3054
|
+
return;
|
|
3055
|
+
}
|
|
3056
|
+
};
|
|
3057
|
+
const onAbort = () => {
|
|
3058
|
+
isAborted = true;
|
|
3059
|
+
reader.cancel().catch(() => {});
|
|
3060
|
+
};
|
|
3061
|
+
if (signal && !signal.aborted) signal.addEventListener("abort", onAbort);
|
|
3062
|
+
try {
|
|
3063
|
+
while (true) {
|
|
3064
|
+
if (isAborted) throw new Error("Operation was aborted");
|
|
3065
|
+
const { done, value } = await reader.read();
|
|
3066
|
+
if (isAborted) throw new Error("Operation was aborted");
|
|
3067
|
+
if (done) break;
|
|
3068
|
+
buffer += decoder.decode(value, { stream: true });
|
|
3069
|
+
const parsed = parseSSEFrames(buffer, currentEvent);
|
|
3070
|
+
buffer = parsed.remaining;
|
|
3071
|
+
currentEvent = parsed.currentEvent;
|
|
3072
|
+
for (const frame of parsed.events) {
|
|
3073
|
+
const event = emitEvent(frame.data);
|
|
3074
|
+
if (event !== void 0) yield event;
|
|
3075
|
+
}
|
|
3076
|
+
}
|
|
3077
|
+
if (isAborted) throw new Error("Operation was aborted");
|
|
3078
|
+
const finalParsed = parseSSEFrames(`${buffer}\n\n`, currentEvent);
|
|
3079
|
+
for (const frame of finalParsed.events) {
|
|
3080
|
+
const event = emitEvent(frame.data);
|
|
3081
|
+
if (event !== void 0) yield event;
|
|
3082
|
+
}
|
|
3083
|
+
} finally {
|
|
3084
|
+
if (signal) signal.removeEventListener("abort", onAbort);
|
|
3085
|
+
try {
|
|
3086
|
+
await reader.cancel();
|
|
3087
|
+
} catch {}
|
|
3088
|
+
reader.releaseLock();
|
|
3089
|
+
}
|
|
3090
|
+
}
|
|
3091
|
+
/**
|
|
3092
|
+
* Helper to convert a Response with SSE stream directly to AsyncIterable
|
|
3093
|
+
* @param response - Response object with SSE stream
|
|
3094
|
+
* @param signal - Optional AbortSignal for cancellation
|
|
3095
|
+
*/
|
|
3096
|
+
async function* responseToAsyncIterable(response, signal) {
|
|
3097
|
+
if (!response.ok) throw new Error(`Response not ok: ${response.status} ${response.statusText}`);
|
|
3098
|
+
if (!response.body) throw new Error("No response body");
|
|
3099
|
+
yield* parseSSEStream(response.body, signal);
|
|
3100
|
+
}
|
|
3101
|
+
/**
|
|
3102
|
+
* Create an SSE-formatted ReadableStream from an AsyncIterable
|
|
3103
|
+
* (Useful for Worker endpoints that need to forward AsyncIterable as SSE)
|
|
3104
|
+
* @param events - AsyncIterable of events
|
|
3105
|
+
* @param options - Stream options
|
|
3106
|
+
*/
|
|
3107
|
+
function asyncIterableToSSEStream(events, options) {
|
|
3108
|
+
const encoder = new TextEncoder();
|
|
3109
|
+
const serialize = options?.serialize || JSON.stringify;
|
|
3110
|
+
return new ReadableStream({
|
|
3111
|
+
async start(controller) {
|
|
3112
|
+
try {
|
|
3113
|
+
for await (const event of events) {
|
|
3114
|
+
if (options?.signal?.aborted) {
|
|
3115
|
+
controller.error(/* @__PURE__ */ new Error("Operation was aborted"));
|
|
3116
|
+
break;
|
|
3117
|
+
}
|
|
3118
|
+
const sseEvent = `data: ${serialize(event)}\n\n`;
|
|
3119
|
+
controller.enqueue(encoder.encode(sseEvent));
|
|
3120
|
+
}
|
|
3121
|
+
controller.enqueue(encoder.encode("data: [DONE]\n\n"));
|
|
3122
|
+
} catch (error) {
|
|
3123
|
+
controller.error(error);
|
|
3124
|
+
} finally {
|
|
3125
|
+
controller.close();
|
|
3126
|
+
}
|
|
3127
|
+
},
|
|
3128
|
+
cancel() {}
|
|
3129
|
+
});
|
|
3130
|
+
}
|
|
3131
|
+
|
|
3132
|
+
//#endregion
|
|
3133
|
+
//#region src/local-mount-sync.ts
|
|
3134
|
+
const DEFAULT_POLL_INTERVAL_MS = 1e3;
|
|
3135
|
+
const DEFAULT_ECHO_SUPPRESS_TTL_MS = 2e3;
|
|
3136
|
+
const MAX_BACKOFF_MS = 3e4;
|
|
3137
|
+
const SYNC_CONCURRENCY = 5;
|
|
3138
|
+
/**
|
|
3139
|
+
* Manages bidirectional sync between an R2 binding and a container directory.
|
|
3140
|
+
*
|
|
3141
|
+
* R2 -> Container: polls bucket.list() to detect changes, then transfers diffs.
|
|
3142
|
+
* Container -> R2: uses inotifywait via the watch API to detect file changes.
|
|
3143
|
+
*/
|
|
3144
|
+
var LocalMountSyncManager = class {
|
|
3145
|
+
bucket;
|
|
3146
|
+
mountPath;
|
|
3147
|
+
prefix;
|
|
3148
|
+
readOnly;
|
|
3149
|
+
client;
|
|
3150
|
+
sessionId;
|
|
3151
|
+
logger;
|
|
3152
|
+
pollIntervalMs;
|
|
3153
|
+
echoSuppressTtlMs;
|
|
3154
|
+
snapshot = /* @__PURE__ */ new Map();
|
|
3155
|
+
echoSuppressSet = /* @__PURE__ */ new Set();
|
|
3156
|
+
pollTimer = null;
|
|
3157
|
+
watchReconnectTimer = null;
|
|
3158
|
+
watchAbortController = null;
|
|
3159
|
+
running = false;
|
|
3160
|
+
consecutivePollFailures = 0;
|
|
3161
|
+
consecutiveWatchFailures = 0;
|
|
3162
|
+
constructor(options) {
|
|
3163
|
+
this.bucket = options.bucket;
|
|
3164
|
+
this.mountPath = options.mountPath;
|
|
3165
|
+
this.prefix = options.prefix;
|
|
3166
|
+
this.readOnly = options.readOnly;
|
|
3167
|
+
this.client = options.client;
|
|
3168
|
+
this.sessionId = options.sessionId;
|
|
3169
|
+
this.logger = options.logger.child({ operation: "local-mount-sync" });
|
|
3170
|
+
this.pollIntervalMs = options.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;
|
|
3171
|
+
this.echoSuppressTtlMs = options.echoSuppressTtlMs ?? DEFAULT_ECHO_SUPPRESS_TTL_MS;
|
|
3172
|
+
}
|
|
3173
|
+
/**
|
|
3174
|
+
* Start bidirectional sync. Performs initial full sync, then starts
|
|
3175
|
+
* the R2 poll loop and (if not readOnly) the container watch loop.
|
|
3176
|
+
*/
|
|
3177
|
+
async start() {
|
|
3178
|
+
this.running = true;
|
|
3179
|
+
await this.client.files.mkdir(this.mountPath, this.sessionId, { recursive: true });
|
|
3180
|
+
await this.fullSyncR2ToContainer();
|
|
3181
|
+
this.schedulePoll();
|
|
3182
|
+
if (!this.readOnly) this.startContainerWatch();
|
|
3183
|
+
this.logger.info("Local mount sync started", {
|
|
3184
|
+
mountPath: this.mountPath,
|
|
3185
|
+
prefix: this.prefix,
|
|
3186
|
+
readOnly: this.readOnly,
|
|
3187
|
+
pollIntervalMs: this.pollIntervalMs
|
|
3188
|
+
});
|
|
3189
|
+
}
|
|
3190
|
+
/**
|
|
3191
|
+
* Stop all sync activity and clean up resources.
|
|
3192
|
+
*/
|
|
3193
|
+
async stop() {
|
|
3194
|
+
this.running = false;
|
|
3195
|
+
if (this.pollTimer) {
|
|
3196
|
+
clearTimeout(this.pollTimer);
|
|
3197
|
+
this.pollTimer = null;
|
|
3198
|
+
}
|
|
3199
|
+
if (this.watchReconnectTimer) {
|
|
3200
|
+
clearTimeout(this.watchReconnectTimer);
|
|
3201
|
+
this.watchReconnectTimer = null;
|
|
3202
|
+
}
|
|
3203
|
+
if (this.watchAbortController) {
|
|
3204
|
+
this.watchAbortController.abort();
|
|
3205
|
+
this.watchAbortController = null;
|
|
3206
|
+
}
|
|
3207
|
+
this.snapshot.clear();
|
|
3208
|
+
this.echoSuppressSet.clear();
|
|
3209
|
+
this.logger.info("Local mount sync stopped", { mountPath: this.mountPath });
|
|
3210
|
+
}
|
|
3211
|
+
async fullSyncR2ToContainer() {
|
|
3212
|
+
const objects = await this.listAllR2Objects();
|
|
3213
|
+
const newSnapshot = /* @__PURE__ */ new Map();
|
|
3214
|
+
for (let i = 0; i < objects.length; i += SYNC_CONCURRENCY) {
|
|
3215
|
+
const batch = objects.slice(i, i + SYNC_CONCURRENCY);
|
|
3216
|
+
await Promise.all(batch.map(async (obj) => {
|
|
3217
|
+
const containerPath = this.r2KeyToContainerPath(obj.key);
|
|
3218
|
+
newSnapshot.set(obj.key, {
|
|
3219
|
+
etag: obj.etag,
|
|
3220
|
+
size: obj.size
|
|
3221
|
+
});
|
|
3222
|
+
await this.ensureParentDir(containerPath);
|
|
3223
|
+
await this.transferR2ObjectToContainer(obj.key, containerPath);
|
|
3224
|
+
}));
|
|
3225
|
+
}
|
|
3226
|
+
this.snapshot = newSnapshot;
|
|
3227
|
+
this.logger.debug("Initial R2 -> Container sync complete", { objectCount: objects.length });
|
|
3228
|
+
}
|
|
3229
|
+
schedulePoll() {
|
|
3230
|
+
if (!this.running) return;
|
|
3231
|
+
const backoffMs = this.consecutivePollFailures > 0 ? Math.min(this.pollIntervalMs * 2 ** this.consecutivePollFailures, MAX_BACKOFF_MS) : this.pollIntervalMs;
|
|
3232
|
+
this.pollTimer = setTimeout(async () => {
|
|
3233
|
+
try {
|
|
3234
|
+
await this.pollR2ForChanges();
|
|
3235
|
+
this.consecutivePollFailures = 0;
|
|
3236
|
+
} catch (error) {
|
|
3237
|
+
this.consecutivePollFailures++;
|
|
3238
|
+
this.logger.error("R2 poll cycle failed", error instanceof Error ? error : new Error(String(error)));
|
|
3239
|
+
}
|
|
3240
|
+
this.schedulePoll();
|
|
3241
|
+
}, backoffMs);
|
|
3242
|
+
}
|
|
3243
|
+
async pollR2ForChanges() {
|
|
3244
|
+
const objects = await this.listAllR2Objects();
|
|
3245
|
+
const newSnapshot = /* @__PURE__ */ new Map();
|
|
3246
|
+
const changed = [];
|
|
3247
|
+
for (const obj of objects) {
|
|
3248
|
+
newSnapshot.set(obj.key, {
|
|
3249
|
+
etag: obj.etag,
|
|
3250
|
+
size: obj.size
|
|
3251
|
+
});
|
|
3252
|
+
const existing = this.snapshot.get(obj.key);
|
|
3253
|
+
if (!existing || existing.etag !== obj.etag) changed.push({
|
|
3254
|
+
key: obj.key,
|
|
3255
|
+
action: existing ? "modified" : "created"
|
|
3256
|
+
});
|
|
3257
|
+
}
|
|
3258
|
+
for (let i = 0; i < changed.length; i += SYNC_CONCURRENCY) {
|
|
3259
|
+
const batch = changed.slice(i, i + SYNC_CONCURRENCY);
|
|
3260
|
+
await Promise.all(batch.map(async ({ key, action }) => {
|
|
3261
|
+
try {
|
|
3262
|
+
const containerPath = this.r2KeyToContainerPath(key);
|
|
3263
|
+
await this.ensureParentDir(containerPath);
|
|
3264
|
+
this.suppressEcho(containerPath);
|
|
3265
|
+
await this.transferR2ObjectToContainer(key, containerPath);
|
|
3266
|
+
this.logger.debug("R2 -> Container: synced object", {
|
|
3267
|
+
key,
|
|
3268
|
+
action
|
|
3269
|
+
});
|
|
3270
|
+
} catch (error) {
|
|
3271
|
+
this.logger.error(`R2 -> Container: failed to sync object ${key}`, error instanceof Error ? error : new Error(String(error)));
|
|
3272
|
+
}
|
|
3273
|
+
}));
|
|
3274
|
+
}
|
|
3275
|
+
for (const [key] of this.snapshot) if (!newSnapshot.has(key)) {
|
|
3276
|
+
const containerPath = this.r2KeyToContainerPath(key);
|
|
3277
|
+
this.suppressEcho(containerPath);
|
|
3278
|
+
try {
|
|
3279
|
+
await this.client.files.deleteFile(containerPath, this.sessionId);
|
|
3280
|
+
this.logger.debug("R2 -> Container: deleted file", { key });
|
|
3281
|
+
} catch (error) {
|
|
3282
|
+
this.logger.error("R2 -> Container: failed to delete", error instanceof Error ? error : new Error(String(error)));
|
|
3283
|
+
}
|
|
3284
|
+
}
|
|
3285
|
+
this.snapshot = newSnapshot;
|
|
3286
|
+
}
|
|
3287
|
+
async listAllR2Objects() {
|
|
3288
|
+
const results = [];
|
|
3289
|
+
let cursor;
|
|
3290
|
+
do {
|
|
3291
|
+
const listResult = await this.bucket.list({
|
|
3292
|
+
...this.prefix && { prefix: this.prefix },
|
|
3293
|
+
...cursor && { cursor }
|
|
3294
|
+
});
|
|
3295
|
+
for (const obj of listResult.objects) results.push({
|
|
3296
|
+
key: obj.key,
|
|
3297
|
+
etag: obj.etag,
|
|
3298
|
+
size: obj.size
|
|
3299
|
+
});
|
|
3300
|
+
cursor = listResult.truncated ? listResult.cursor : void 0;
|
|
3301
|
+
} while (cursor);
|
|
3302
|
+
return results;
|
|
3303
|
+
}
|
|
3304
|
+
async transferR2ObjectToContainer(key, containerPath) {
|
|
3305
|
+
const obj = await this.bucket.get(key);
|
|
3306
|
+
if (!obj) return;
|
|
3307
|
+
const arrayBuffer = await obj.arrayBuffer();
|
|
3308
|
+
const base64 = uint8ArrayToBase64(new Uint8Array(arrayBuffer));
|
|
3309
|
+
await this.client.files.writeFile(containerPath, base64, this.sessionId, { encoding: "base64" });
|
|
3310
|
+
}
|
|
3311
|
+
async ensureParentDir(containerPath) {
|
|
3312
|
+
const parentDir = containerPath.substring(0, containerPath.lastIndexOf("/"));
|
|
3313
|
+
if (parentDir && parentDir !== this.mountPath) await this.client.files.mkdir(parentDir, this.sessionId, { recursive: true });
|
|
3314
|
+
}
|
|
3315
|
+
startContainerWatch() {
|
|
3316
|
+
this.watchAbortController = new AbortController();
|
|
3317
|
+
this.runWatchWithRetry();
|
|
3318
|
+
}
|
|
3319
|
+
runWatchWithRetry() {
|
|
3320
|
+
if (!this.running) return;
|
|
3321
|
+
this.runContainerWatchLoop().then(() => {
|
|
3322
|
+
this.consecutiveWatchFailures = 0;
|
|
3323
|
+
this.scheduleWatchReconnect();
|
|
3324
|
+
}).catch((error) => {
|
|
3325
|
+
if (!this.running) return;
|
|
3326
|
+
this.consecutiveWatchFailures++;
|
|
3327
|
+
this.logger.error("Container watch loop failed", error instanceof Error ? error : new Error(String(error)));
|
|
3328
|
+
this.scheduleWatchReconnect();
|
|
3329
|
+
});
|
|
3330
|
+
}
|
|
3331
|
+
scheduleWatchReconnect() {
|
|
3332
|
+
if (!this.running) return;
|
|
3333
|
+
const backoffMs = this.consecutiveWatchFailures > 0 ? Math.min(this.pollIntervalMs * 2 ** this.consecutiveWatchFailures, MAX_BACKOFF_MS) : this.pollIntervalMs;
|
|
3334
|
+
this.logger.debug("Reconnecting container watch", {
|
|
3335
|
+
backoffMs,
|
|
3336
|
+
failures: this.consecutiveWatchFailures
|
|
3337
|
+
});
|
|
3338
|
+
this.watchReconnectTimer = setTimeout(() => {
|
|
3339
|
+
this.watchReconnectTimer = null;
|
|
3340
|
+
if (!this.running) return;
|
|
3341
|
+
this.watchAbortController = new AbortController();
|
|
3342
|
+
this.runWatchWithRetry();
|
|
3343
|
+
}, backoffMs);
|
|
3344
|
+
}
|
|
3345
|
+
/**
 * Consume the container's recursive file-watch SSE stream and mirror
 * each file event into R2: create/modify/move_to uploads the file,
 * delete/move_from removes the corresponding object.
 * Per-event failures are logged and skipped so one bad file does not
 * kill the whole loop; stream-level failures propagate to the caller
 * (runWatchWithRetry), which reconnects with backoff.
 */
async runContainerWatchLoop() {
  const stream = await this.client.watch.watch({
    path: this.mountPath,
    recursive: true,
    sessionId: this.sessionId
  });
  for await (const event of parseSSEStream(stream, this.watchAbortController?.signal)) {
    // Stop draining promptly once the mount has been torn down.
    if (!this.running) break;
    // Any received event proves the stream is healthy again.
    this.consecutiveWatchFailures = 0;
    if (event.type !== "event") continue;
    // Directories are not stored as R2 objects; only files sync.
    if (event.isDirectory) continue;
    const containerPath = event.path;
    // Skip events caused by our own R2 -> container writes.
    if (this.echoSuppressSet.has(containerPath)) continue;
    const r2Key = this.containerPathToR2Key(containerPath);
    // null means the path resolved outside the mount — ignore it.
    if (!r2Key) continue;
    try {
      switch (event.eventType) {
        case "create":
        case "modify":
        case "move_to":
          await this.uploadFileToR2(containerPath, r2Key);
          this.logger.debug("Container -> R2: synced file", {
            path: containerPath,
            key: r2Key,
            action: event.eventType
          });
          break;
        case "delete":
        case "move_from":
          await this.bucket.delete(r2Key);
          // Drop the snapshot entry so a later poll doesn't re-create it.
          this.snapshot.delete(r2Key);
          this.logger.debug("Container -> R2: deleted object", {
            path: containerPath,
            key: r2Key
          });
          break;
      }
    } catch (error) {
      // Best-effort per event: log and continue with the next one.
      this.logger.error(`Container -> R2 sync failed for ${containerPath}`, error instanceof Error ? error : new Error(String(error)));
    }
  }
}
|
|
3387
|
+
/**
|
|
3388
|
+
* Read a container file and upload it to R2, then update the local
|
|
3389
|
+
* snapshot so the next poll cycle doesn't echo the write back.
|
|
3390
|
+
*/
|
|
3391
|
+
async uploadFileToR2(containerPath, r2Key) {
|
|
3392
|
+
const bytes = base64ToUint8Array((await this.client.files.readFile(containerPath, this.sessionId, { encoding: "base64" })).content);
|
|
3393
|
+
await this.bucket.put(r2Key, bytes);
|
|
3394
|
+
const head = await this.bucket.head(r2Key);
|
|
3395
|
+
if (head) this.snapshot.set(r2Key, {
|
|
3396
|
+
etag: head.etag,
|
|
3397
|
+
size: head.size
|
|
3398
|
+
});
|
|
3399
|
+
}
|
|
3400
|
+
suppressEcho(containerPath) {
|
|
3401
|
+
this.echoSuppressSet.add(containerPath);
|
|
3402
|
+
setTimeout(() => {
|
|
3403
|
+
this.echoSuppressSet.delete(containerPath);
|
|
3404
|
+
}, this.echoSuppressTtlMs);
|
|
3405
|
+
}
|
|
3406
|
+
r2KeyToContainerPath(key) {
|
|
3407
|
+
let relativePath = key;
|
|
3408
|
+
if (this.prefix) relativePath = key.startsWith(this.prefix) ? key.slice(this.prefix.length) : key;
|
|
3409
|
+
return path.join(this.mountPath, relativePath);
|
|
3410
|
+
}
|
|
3411
|
+
containerPathToR2Key(containerPath) {
|
|
3412
|
+
const resolved = path.resolve(containerPath);
|
|
3413
|
+
const mount = path.resolve(this.mountPath);
|
|
3414
|
+
if (!resolved.startsWith(mount)) return null;
|
|
3415
|
+
const relativePath = path.relative(mount, resolved);
|
|
3416
|
+
if (!relativePath || relativePath.startsWith("..")) return null;
|
|
3417
|
+
return this.prefix ? path.join(this.prefix, relativePath) : relativePath;
|
|
3418
|
+
}
|
|
3419
|
+
};
|
|
3420
|
+
/** Encode raw bytes as a base64 string via Node's Buffer. */
function uint8ArrayToBase64(bytes) {
  const buffer = Buffer.from(bytes);
  return buffer.toString("base64");
}
|
|
3423
|
+
/** Decode a base64 string into a freshly allocated Uint8Array. */
function base64ToUint8Array(base64) {
  const decoded = Buffer.from(base64, "base64");
  return new Uint8Array(decoded);
}
|
|
3426
|
+
|
|
3029
3427
|
//#endregion
|
|
3030
3428
|
//#region src/pty/proxy.ts
|
|
3031
3429
|
async function proxyTerminal(stub, sessionId, request, options) {
|
|
@@ -3052,14 +3450,14 @@ async function proxyToSandbox(request, env) {
|
|
|
3052
3450
|
const url = new URL(request.url);
|
|
3053
3451
|
const routeInfo = extractSandboxRoute(url);
|
|
3054
3452
|
if (!routeInfo) return null;
|
|
3055
|
-
const { sandboxId, port, path, token } = routeInfo;
|
|
3453
|
+
const { sandboxId, port, path: path$1, token } = routeInfo;
|
|
3056
3454
|
const sandbox = getSandbox(env.Sandbox, sandboxId, { normalizeId: true });
|
|
3057
3455
|
if (port !== 3e3) {
|
|
3058
3456
|
if (!await sandbox.validatePortToken(port, token)) {
|
|
3059
3457
|
logger.warn("Invalid token access blocked", {
|
|
3060
3458
|
port,
|
|
3061
3459
|
sandboxId,
|
|
3062
|
-
path,
|
|
3460
|
+
path: path$1,
|
|
3063
3461
|
hostname: url.hostname,
|
|
3064
3462
|
url: request.url,
|
|
3065
3463
|
method: request.method,
|
|
@@ -3076,8 +3474,8 @@ async function proxyToSandbox(request, env) {
|
|
|
3076
3474
|
}
|
|
3077
3475
|
if (request.headers.get("Upgrade")?.toLowerCase() === "websocket") return await sandbox.fetch(switchPort(request, port));
|
|
3078
3476
|
let proxyUrl;
|
|
3079
|
-
if (port !== 3e3) proxyUrl = `http://localhost:${port}${path}${url.search}`;
|
|
3080
|
-
else proxyUrl = `http://localhost:3000${path}${url.search}`;
|
|
3477
|
+
if (port !== 3e3) proxyUrl = `http://localhost:${port}${path$1}${url.search}`;
|
|
3478
|
+
else proxyUrl = `http://localhost:3000${path$1}${url.search}`;
|
|
3081
3479
|
const headers = {
|
|
3082
3480
|
"X-Original-URL": request.url,
|
|
3083
3481
|
"X-Forwarded-Host": url.hostname,
|
|
@@ -3139,106 +3537,6 @@ function isLocalhostPattern(hostname) {
|
|
|
3139
3537
|
return hostPart === "localhost" || hostPart === "127.0.0.1" || hostPart === "0.0.0.0";
|
|
3140
3538
|
}
|
|
3141
3539
|
|
|
3142
|
-
//#endregion
|
|
3143
|
-
//#region src/sse-parser.ts
|
|
3144
|
-
/**
|
|
3145
|
-
* Server-Sent Events (SSE) parser for streaming responses
|
|
3146
|
-
* Converts ReadableStream<Uint8Array> to typed AsyncIterable<T>
|
|
3147
|
-
*/
|
|
3148
|
-
/**
|
|
3149
|
-
* Parse a ReadableStream of SSE events into typed AsyncIterable
|
|
3150
|
-
* @param stream - The ReadableStream from fetch response
|
|
3151
|
-
* @param signal - Optional AbortSignal for cancellation
|
|
3152
|
-
*/
|
|
3153
|
-
async function* parseSSEStream(stream, signal) {
|
|
3154
|
-
const reader = stream.getReader();
|
|
3155
|
-
const decoder = new TextDecoder();
|
|
3156
|
-
let buffer = "";
|
|
3157
|
-
let currentEvent = { data: [] };
|
|
3158
|
-
let isAborted = signal?.aborted ?? false;
|
|
3159
|
-
const emitEvent = (data) => {
|
|
3160
|
-
if (data === "[DONE]" || data.trim() === "") return;
|
|
3161
|
-
try {
|
|
3162
|
-
return JSON.parse(data);
|
|
3163
|
-
} catch {
|
|
3164
|
-
return;
|
|
3165
|
-
}
|
|
3166
|
-
};
|
|
3167
|
-
const onAbort = () => {
|
|
3168
|
-
isAborted = true;
|
|
3169
|
-
reader.cancel().catch(() => {});
|
|
3170
|
-
};
|
|
3171
|
-
if (signal && !signal.aborted) signal.addEventListener("abort", onAbort);
|
|
3172
|
-
try {
|
|
3173
|
-
while (true) {
|
|
3174
|
-
if (isAborted) throw new Error("Operation was aborted");
|
|
3175
|
-
const { done, value } = await reader.read();
|
|
3176
|
-
if (isAborted) throw new Error("Operation was aborted");
|
|
3177
|
-
if (done) break;
|
|
3178
|
-
buffer += decoder.decode(value, { stream: true });
|
|
3179
|
-
const parsed = parseSSEFrames(buffer, currentEvent);
|
|
3180
|
-
buffer = parsed.remaining;
|
|
3181
|
-
currentEvent = parsed.currentEvent;
|
|
3182
|
-
for (const frame of parsed.events) {
|
|
3183
|
-
const event = emitEvent(frame.data);
|
|
3184
|
-
if (event !== void 0) yield event;
|
|
3185
|
-
}
|
|
3186
|
-
}
|
|
3187
|
-
if (isAborted) throw new Error("Operation was aborted");
|
|
3188
|
-
const finalParsed = parseSSEFrames(`${buffer}\n\n`, currentEvent);
|
|
3189
|
-
for (const frame of finalParsed.events) {
|
|
3190
|
-
const event = emitEvent(frame.data);
|
|
3191
|
-
if (event !== void 0) yield event;
|
|
3192
|
-
}
|
|
3193
|
-
} finally {
|
|
3194
|
-
if (signal) signal.removeEventListener("abort", onAbort);
|
|
3195
|
-
try {
|
|
3196
|
-
await reader.cancel();
|
|
3197
|
-
} catch {}
|
|
3198
|
-
reader.releaseLock();
|
|
3199
|
-
}
|
|
3200
|
-
}
|
|
3201
|
-
/**
|
|
3202
|
-
* Helper to convert a Response with SSE stream directly to AsyncIterable
|
|
3203
|
-
* @param response - Response object with SSE stream
|
|
3204
|
-
* @param signal - Optional AbortSignal for cancellation
|
|
3205
|
-
*/
|
|
3206
|
-
async function* responseToAsyncIterable(response, signal) {
|
|
3207
|
-
if (!response.ok) throw new Error(`Response not ok: ${response.status} ${response.statusText}`);
|
|
3208
|
-
if (!response.body) throw new Error("No response body");
|
|
3209
|
-
yield* parseSSEStream(response.body, signal);
|
|
3210
|
-
}
|
|
3211
|
-
/**
|
|
3212
|
-
* Create an SSE-formatted ReadableStream from an AsyncIterable
|
|
3213
|
-
* (Useful for Worker endpoints that need to forward AsyncIterable as SSE)
|
|
3214
|
-
* @param events - AsyncIterable of events
|
|
3215
|
-
* @param options - Stream options
|
|
3216
|
-
*/
|
|
3217
|
-
function asyncIterableToSSEStream(events, options) {
|
|
3218
|
-
const encoder = new TextEncoder();
|
|
3219
|
-
const serialize = options?.serialize || JSON.stringify;
|
|
3220
|
-
return new ReadableStream({
|
|
3221
|
-
async start(controller) {
|
|
3222
|
-
try {
|
|
3223
|
-
for await (const event of events) {
|
|
3224
|
-
if (options?.signal?.aborted) {
|
|
3225
|
-
controller.error(/* @__PURE__ */ new Error("Operation was aborted"));
|
|
3226
|
-
break;
|
|
3227
|
-
}
|
|
3228
|
-
const sseEvent = `data: ${serialize(event)}\n\n`;
|
|
3229
|
-
controller.enqueue(encoder.encode(sseEvent));
|
|
3230
|
-
}
|
|
3231
|
-
controller.enqueue(encoder.encode("data: [DONE]\n\n"));
|
|
3232
|
-
} catch (error) {
|
|
3233
|
-
controller.error(error);
|
|
3234
|
-
} finally {
|
|
3235
|
-
controller.close();
|
|
3236
|
-
}
|
|
3237
|
-
},
|
|
3238
|
-
cancel() {}
|
|
3239
|
-
});
|
|
3240
|
-
}
|
|
3241
|
-
|
|
3242
3540
|
//#endregion
|
|
3243
3541
|
//#region src/storage-mount/errors.ts
|
|
3244
3542
|
/**
|
|
@@ -3388,21 +3686,79 @@ function buildS3fsSource(bucket, prefix) {
|
|
|
3388
3686
|
* This file is auto-updated by .github/changeset-version.ts during releases
|
|
3389
3687
|
* DO NOT EDIT MANUALLY - Changes will be overwritten on the next version bump
|
|
3390
3688
|
*/
|
|
3391
|
-
const SDK_VERSION = "0.7.
|
|
3689
|
+
const SDK_VERSION = "0.7.18";
|
|
3392
3690
|
|
|
3393
3691
|
//#endregion
|
|
3394
3692
|
//#region src/sandbox.ts
|
|
3693
|
+
/**
 * Module-level cache of last-applied sandbox configuration, keyed weakly
 * by namespace object so namespaces remain garbage-collectable.
 */
const sandboxConfigurationCache = /* @__PURE__ */ new WeakMap();
/**
 * Return the per-namespace map of sandbox id -> last applied
 * configuration, creating it lazily on first access.
 */
function getNamespaceConfigurationCache(namespace) {
  let cache = sandboxConfigurationCache.get(namespace);
  if (cache === undefined) {
    cache = new Map();
    sandboxConfigurationCache.set(namespace, cache);
  }
  return cache;
}
|
|
3701
|
+
/**
 * Field-by-field equality for container-timeout configs.
 * Null-safe: two missing configs (or a missing config vs. an empty
 * object) compare equal, since every field reads as undefined.
 */
function sameContainerTimeouts(left, right) {
  const fields = ["instanceGetTimeoutMS", "portReadyTimeoutMS", "waitIntervalMS"];
  return fields.every((field) => left?.[field] === right?.[field]);
}
|
|
3704
|
+
/**
 * Compute the delta between the cached (last applied) configuration and
 * the caller's options. Only fields that changed are included, so an
 * empty return object means nothing needs to be pushed to the stub.
 */
function buildSandboxConfiguration(effectiveId, options, cached) {
  const delta = {};
  // Name is unchanged only when the cache entry exists and both the id
  // and the normalizeId flag match the requested values.
  const nameUnchanged = cached?.sandboxName === effectiveId && cached.normalizeId === options?.normalizeId;
  if (!nameUnchanged) {
    delta.sandboxName = {
      name: effectiveId,
      normalizeId: options?.normalizeId
    };
  }
  for (const field of ["baseUrl", "sleepAfter", "keepAlive"]) {
    const value = options?.[field];
    if (value !== void 0 && cached?.[field] !== value) delta[field] = value;
  }
  const timeouts = options?.containerTimeouts;
  if (timeouts && !sameContainerTimeouts(cached?.containerTimeouts, timeouts)) delta.containerTimeouts = timeouts;
  return delta;
}
|
|
3716
|
+
/** True when the configuration delta contains at least one field to apply. */
function hasSandboxConfiguration(configuration) {
  const fields = ["sandboxName", "baseUrl", "sleepAfter", "keepAlive", "containerTimeouts"];
  return fields.some((field) => configuration[field] !== void 0);
}
|
|
3719
|
+
/**
 * Fold an applied configuration delta into the cached record, producing
 * the next cache entry. Cached fields survive unless the delta overrides
 * them; the sandboxName delta flattens into sandboxName + normalizeId.
 */
function mergeSandboxConfiguration(cached, configuration) {
  const merged = { ...cached };
  if (configuration.sandboxName) {
    merged.sandboxName = configuration.sandboxName.name;
    merged.normalizeId = configuration.sandboxName.normalizeId;
  }
  for (const field of ["baseUrl", "sleepAfter", "keepAlive", "containerTimeouts"]) {
    if (configuration[field] !== void 0) merged[field] = configuration[field];
  }
  return merged;
}
|
|
3732
|
+
/**
 * Push a configuration delta to the Durable Object stub. Prefers the
 * batched configure() RPC when the stub exposes it; otherwise falls back
 * to the individual legacy setters, started in parallel. Each setter is
 * optional on older stubs, so missing ones resolve as no-ops.
 * Resolves to undefined (configure()'s own result is passed through).
 */
function applySandboxConfiguration(stub, configuration) {
  if (stub.configure) return stub.configure(configuration);
  const { sandboxName, baseUrl, sleepAfter, keepAlive, containerTimeouts } = configuration;
  const operations = [];
  if (sandboxName) operations.push(stub.setSandboxName?.(sandboxName.name, sandboxName.normalizeId) ?? Promise.resolve());
  if (baseUrl !== void 0) operations.push(stub.setBaseUrl?.(baseUrl) ?? Promise.resolve());
  if (sleepAfter !== void 0) operations.push(stub.setSleepAfter?.(sleepAfter) ?? Promise.resolve());
  if (keepAlive !== void 0) operations.push(stub.setKeepAlive?.(keepAlive) ?? Promise.resolve());
  if (containerTimeouts !== void 0) operations.push(stub.setContainerTimeouts?.(containerTimeouts) ?? Promise.resolve());
  return Promise.all(operations).then(() => void 0);
}
|
|
3395
3742
|
function getSandbox(ns, id, options) {
|
|
3396
3743
|
const sanitizedId = sanitizeSandboxId(id);
|
|
3397
3744
|
const effectiveId = options?.normalizeId ? sanitizedId.toLowerCase() : sanitizedId;
|
|
3398
3745
|
const hasUppercase = /[A-Z]/.test(sanitizedId);
|
|
3399
3746
|
if (!options?.normalizeId && hasUppercase) createLogger({ component: "sandbox-do" }).warn(`Sandbox ID "${sanitizedId}" contains uppercase letters, which causes issues with preview URLs (hostnames are case-insensitive). normalizeId will default to true in a future version to prevent this. Use lowercase IDs or pass { normalizeId: true } to prepare.`);
|
|
3400
3747
|
const stub = getContainer(ns, effectiveId);
|
|
3401
|
-
|
|
3402
|
-
|
|
3403
|
-
|
|
3404
|
-
if (
|
|
3405
|
-
|
|
3748
|
+
const namespaceCache = getNamespaceConfigurationCache(ns);
|
|
3749
|
+
const cachedConfiguration = namespaceCache.get(effectiveId);
|
|
3750
|
+
const configuration = buildSandboxConfiguration(effectiveId, options, cachedConfiguration);
|
|
3751
|
+
if (hasSandboxConfiguration(configuration)) {
|
|
3752
|
+
const nextConfiguration = mergeSandboxConfiguration(cachedConfiguration, configuration);
|
|
3753
|
+
namespaceCache.set(effectiveId, nextConfiguration);
|
|
3754
|
+
applySandboxConfiguration(stub, configuration).catch(() => {
|
|
3755
|
+
if (cachedConfiguration) {
|
|
3756
|
+
namespaceCache.set(effectiveId, cachedConfiguration);
|
|
3757
|
+
return;
|
|
3758
|
+
}
|
|
3759
|
+
namespaceCache.delete(effectiveId);
|
|
3760
|
+
});
|
|
3761
|
+
}
|
|
3406
3762
|
const defaultSessionId = `sandbox-${effectiveId}`;
|
|
3407
3763
|
const enhancedMethods = {
|
|
3408
3764
|
fetch: (request) => stub.fetch(request),
|
|
@@ -3616,6 +3972,13 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3616
3972
|
await this.ctx.storage.put("normalizeId", this.normalizeId);
|
|
3617
3973
|
}
|
|
3618
3974
|
}
|
|
3975
|
+
async configure(configuration) {
|
|
3976
|
+
if (configuration.sandboxName) await this.setSandboxName(configuration.sandboxName.name, configuration.sandboxName.normalizeId);
|
|
3977
|
+
if (configuration.baseUrl !== void 0) await this.setBaseUrl(configuration.baseUrl);
|
|
3978
|
+
if (configuration.sleepAfter !== void 0) await this.setSleepAfter(configuration.sleepAfter);
|
|
3979
|
+
if (configuration.keepAlive !== void 0) await this.setKeepAlive(configuration.keepAlive);
|
|
3980
|
+
if (configuration.containerTimeouts !== void 0) await this.setContainerTimeouts(configuration.containerTimeouts);
|
|
3981
|
+
}
|
|
3619
3982
|
async setBaseUrl(baseUrl) {
|
|
3620
3983
|
if (!this.baseUrl) {
|
|
3621
3984
|
this.baseUrl = baseUrl;
|
|
@@ -3698,8 +4061,67 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3698
4061
|
waitIntervalMS: parseAndValidate(getEnvString(env, "SANDBOX_POLL_INTERVAL_MS"), "waitIntervalMS", 100, 5e3)
|
|
3699
4062
|
};
|
|
3700
4063
|
}
|
|
4064
|
+
/**
|
|
4065
|
+
* Mount an S3-compatible bucket as a local directory.
|
|
4066
|
+
*
|
|
4067
|
+
* Requires explicit endpoint URL for production. Credentials are auto-detected from environment
|
|
4068
|
+
* variables or can be provided explicitly.
|
|
4069
|
+
*
|
|
4070
|
+
* @param bucket - Bucket name (or R2 binding name when localBucket is true)
|
|
4071
|
+
* @param mountPath - Absolute path in container to mount at
|
|
4072
|
+
* @param options - Mount configuration
|
|
4073
|
+
* @throws MissingCredentialsError if no credentials found in environment
|
|
4074
|
+
* @throws S3FSMountError if S3FS mount command fails
|
|
4075
|
+
* @throws InvalidMountConfigError if bucket name, mount path, or endpoint is invalid
|
|
4076
|
+
*/
|
|
3701
4077
|
async mountBucket(bucket, mountPath, options) {
|
|
3702
4078
|
this.logger.info(`Mounting bucket ${bucket} to ${mountPath}`);
|
|
4079
|
+
if ("localBucket" in options && options.localBucket) {
|
|
4080
|
+
await this.mountBucketLocal(bucket, mountPath, options);
|
|
4081
|
+
return;
|
|
4082
|
+
}
|
|
4083
|
+
await this.mountBucketFuse(bucket, mountPath, options);
|
|
4084
|
+
}
|
|
4085
|
+
/**
 * Local dev mount: bidirectional sync via R2 binding + file/watch APIs
 *
 * Looks up the R2 binding named `bucket` on this.env, validates the
 * mount path, and starts a LocalMountSyncManager that keeps the bucket
 * and the container directory in sync. Registers the mount before
 * starting so concurrent calls see the path as taken; rolls back the
 * registration (and stops the manager) if startup fails.
 *
 * @throws InvalidMountConfigError when the binding is missing/not an
 *   R2Bucket, the path is not absolute, or the path is already mounted.
 */
async mountBucketLocal(bucket, mountPath, options) {
  const r2Binding = this.env[bucket];
  if (!r2Binding || !isR2Bucket(r2Binding)) throw new InvalidMountConfigError(`R2 binding "${bucket}" not found in env or is not an R2Bucket. Make sure the binding name matches your wrangler.jsonc R2 binding.`);
  if (!mountPath || !mountPath.startsWith("/")) throw new InvalidMountConfigError(`Invalid mount path: "${mountPath}". Must be an absolute path starting with /`);
  if (this.activeMounts.has(mountPath)) throw new InvalidMountConfigError(`Mount path already in use: ${mountPath}`);
  const sessionId = await this.ensureDefaultSession();
  const syncManager = new LocalMountSyncManager({
    bucket: r2Binding,
    mountPath,
    prefix: options.prefix,
    // readOnly defaults to a writable mount when not specified
    readOnly: options.readOnly ?? false,
    client: this.client,
    sessionId,
    logger: this.logger
  });
  const mountInfo = {
    mountType: "local-sync",
    bucket,
    mountPath,
    syncManager,
    mounted: false
  };
  // Reserve the path before the async start so duplicate mounts are rejected.
  this.activeMounts.set(mountPath, mountInfo);
  try {
    await syncManager.start();
    mountInfo.mounted = true;
    this.logger.info(`Successfully mounted bucket ${bucket} to ${mountPath} (local sync)`);
  } catch (error) {
    // Roll back the reservation and surface the original failure.
    await syncManager.stop();
    this.activeMounts.delete(mountPath);
    throw error;
  }
}
|
|
4121
|
+
/**
|
|
4122
|
+
* Production mount: S3FS-FUSE inside the container
|
|
4123
|
+
*/
|
|
4124
|
+
async mountBucketFuse(bucket, mountPath, options) {
|
|
3703
4125
|
const prefix = options.prefix || void 0;
|
|
3704
4126
|
this.validateMountOptions(bucket, mountPath, {
|
|
3705
4127
|
...options,
|
|
@@ -3713,26 +4135,21 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3713
4135
|
});
|
|
3714
4136
|
const credentials = detectCredentials(options, this.envVars);
|
|
3715
4137
|
const passwordFilePath = this.generatePasswordFilePath();
|
|
3716
|
-
|
|
4138
|
+
const mountInfo = {
|
|
4139
|
+
mountType: "fuse",
|
|
3717
4140
|
bucket: s3fsSource,
|
|
3718
4141
|
mountPath,
|
|
3719
4142
|
endpoint: options.endpoint,
|
|
3720
4143
|
provider,
|
|
3721
4144
|
passwordFilePath,
|
|
3722
4145
|
mounted: false
|
|
3723
|
-
}
|
|
4146
|
+
};
|
|
4147
|
+
this.activeMounts.set(mountPath, mountInfo);
|
|
3724
4148
|
try {
|
|
3725
4149
|
await this.createPasswordFile(passwordFilePath, bucket, credentials);
|
|
3726
4150
|
await this.exec(`mkdir -p ${shellEscape(mountPath)}`);
|
|
3727
4151
|
await this.executeS3FSMount(s3fsSource, mountPath, options, provider, passwordFilePath);
|
|
3728
|
-
|
|
3729
|
-
bucket: s3fsSource,
|
|
3730
|
-
mountPath,
|
|
3731
|
-
endpoint: options.endpoint,
|
|
3732
|
-
provider,
|
|
3733
|
-
passwordFilePath,
|
|
3734
|
-
mounted: true
|
|
3735
|
-
});
|
|
4152
|
+
mountInfo.mounted = true;
|
|
3736
4153
|
this.logger.info(`Successfully mounted bucket ${bucket} to ${mountPath}`);
|
|
3737
4154
|
} catch (error) {
|
|
3738
4155
|
await this.deletePasswordFile(passwordFilePath);
|
|
@@ -3750,7 +4167,11 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3750
4167
|
this.logger.info(`Unmounting bucket from ${mountPath}`);
|
|
3751
4168
|
const mountInfo = this.activeMounts.get(mountPath);
|
|
3752
4169
|
if (!mountInfo) throw new InvalidMountConfigError(`No active mount found at path: ${mountPath}`);
|
|
3753
|
-
|
|
4170
|
+
if (mountInfo.mountType === "local-sync") {
|
|
4171
|
+
await mountInfo.syncManager.stop();
|
|
4172
|
+
mountInfo.mounted = false;
|
|
4173
|
+
this.activeMounts.delete(mountPath);
|
|
4174
|
+
} else try {
|
|
3754
4175
|
await this.exec(`fusermount -u ${shellEscape(mountPath)}`);
|
|
3755
4176
|
mountInfo.mounted = false;
|
|
3756
4177
|
this.activeMounts.delete(mountPath);
|
|
@@ -3763,7 +4184,6 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3763
4184
|
* Validate mount options
|
|
3764
4185
|
*/
|
|
3765
4186
|
validateMountOptions(bucket, mountPath, options) {
|
|
3766
|
-
if (!options.endpoint) throw new InvalidMountConfigError("Endpoint is required. Provide the full S3-compatible endpoint URL.");
|
|
3767
4187
|
try {
|
|
3768
4188
|
new URL(options.endpoint);
|
|
3769
4189
|
} catch (error) {
|
|
@@ -3832,7 +4252,14 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3832
4252
|
await this.client.desktop.stop();
|
|
3833
4253
|
} catch {}
|
|
3834
4254
|
this.client.disconnect();
|
|
3835
|
-
for (const [mountPath, mountInfo] of this.activeMounts.entries()) {
|
|
4255
|
+
for (const [mountPath, mountInfo] of this.activeMounts.entries()) if (mountInfo.mountType === "local-sync") try {
|
|
4256
|
+
await mountInfo.syncManager.stop();
|
|
4257
|
+
mountInfo.mounted = false;
|
|
4258
|
+
} catch (error) {
|
|
4259
|
+
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
4260
|
+
this.logger.warn(`Failed to stop local sync for ${mountPath}: ${errorMsg}`);
|
|
4261
|
+
}
|
|
4262
|
+
else {
|
|
3836
4263
|
if (mountInfo.mounted) try {
|
|
3837
4264
|
this.logger.info(`Unmounting bucket ${mountInfo.bucket} from ${mountPath}`);
|
|
3838
4265
|
await this.exec(`fusermount -u ${shellEscape(mountPath)}`);
|
|
@@ -3876,6 +4303,7 @@ var Sandbox = class Sandbox extends Container {
|
|
|
3876
4303
|
}
|
|
3877
4304
|
async onStop() {
|
|
3878
4305
|
this.logger.debug("Sandbox stopped");
|
|
4306
|
+
for (const [, m] of this.activeMounts) if (m.mountType === "local-sync") await m.syncManager.stop().catch(() => {});
|
|
3879
4307
|
this.defaultSession = null;
|
|
3880
4308
|
this.activeMounts.clear();
|
|
3881
4309
|
await Promise.all([this.ctx.storage.delete("portTokens"), this.ctx.storage.delete("defaultSession")]);
|
|
@@ -4350,17 +4778,17 @@ var Sandbox = class Sandbox extends Container {
|
|
|
4350
4778
|
* Wait for a port to become available (for process readiness checking)
|
|
4351
4779
|
*/
|
|
4352
4780
|
async waitForPortReady(processId, command, port, options) {
|
|
4353
|
-
const { mode = "http", path = "/", status = {
|
|
4781
|
+
const { mode = "http", path: path$1 = "/", status = {
|
|
4354
4782
|
min: 200,
|
|
4355
4783
|
max: 399
|
|
4356
4784
|
}, timeout, interval = 500 } = options ?? {};
|
|
4357
|
-
const conditionStr = mode === "http" ? `port ${port} (HTTP ${path})` : `port ${port} (TCP)`;
|
|
4785
|
+
const conditionStr = mode === "http" ? `port ${port} (HTTP ${path$1})` : `port ${port} (TCP)`;
|
|
4358
4786
|
const statusMin = typeof status === "number" ? status : status.min;
|
|
4359
4787
|
const statusMax = typeof status === "number" ? status : status.max;
|
|
4360
4788
|
const stream = await this.client.ports.watchPort({
|
|
4361
4789
|
port,
|
|
4362
4790
|
mode,
|
|
4363
|
-
path,
|
|
4791
|
+
path: path$1,
|
|
4364
4792
|
statusMin,
|
|
4365
4793
|
statusMax,
|
|
4366
4794
|
processId,
|
|
@@ -4620,17 +5048,17 @@ var Sandbox = class Sandbox extends Container {
|
|
|
4620
5048
|
depth: options?.depth
|
|
4621
5049
|
});
|
|
4622
5050
|
}
|
|
4623
|
-
async mkdir(path, options = {}) {
|
|
5051
|
+
async mkdir(path$1, options = {}) {
|
|
4624
5052
|
const session = options.sessionId ?? await this.ensureDefaultSession();
|
|
4625
|
-
return this.client.files.mkdir(path, session, { recursive: options.recursive });
|
|
5053
|
+
return this.client.files.mkdir(path$1, session, { recursive: options.recursive });
|
|
4626
5054
|
}
|
|
4627
|
-
async writeFile(path, content, options = {}) {
|
|
5055
|
+
async writeFile(path$1, content, options = {}) {
|
|
4628
5056
|
const session = options.sessionId ?? await this.ensureDefaultSession();
|
|
4629
|
-
return this.client.files.writeFile(path, content, session, { encoding: options.encoding });
|
|
5057
|
+
return this.client.files.writeFile(path$1, content, session, { encoding: options.encoding });
|
|
4630
5058
|
}
|
|
4631
|
-
async deleteFile(path, sessionId) {
|
|
5059
|
+
async deleteFile(path$1, sessionId) {
|
|
4632
5060
|
const session = sessionId ?? await this.ensureDefaultSession();
|
|
4633
|
-
return this.client.files.deleteFile(path, session);
|
|
5061
|
+
return this.client.files.deleteFile(path$1, session);
|
|
4634
5062
|
}
|
|
4635
5063
|
async renameFile(oldPath, newPath, sessionId) {
|
|
4636
5064
|
const session = sessionId ?? await this.ensureDefaultSession();
|
|
@@ -4640,9 +5068,9 @@ var Sandbox = class Sandbox extends Container {
|
|
|
4640
5068
|
const session = sessionId ?? await this.ensureDefaultSession();
|
|
4641
5069
|
return this.client.files.moveFile(sourcePath, destinationPath, session);
|
|
4642
5070
|
}
|
|
4643
|
-
async readFile(path, options = {}) {
|
|
5071
|
+
async readFile(path$1, options = {}) {
|
|
4644
5072
|
const session = options.sessionId ?? await this.ensureDefaultSession();
|
|
4645
|
-
return this.client.files.readFile(path, session, { encoding: options.encoding });
|
|
5073
|
+
return this.client.files.readFile(path$1, session, { encoding: options.encoding });
|
|
4646
5074
|
}
|
|
4647
5075
|
/**
|
|
4648
5076
|
* Stream a file from the sandbox using Server-Sent Events
|
|
@@ -4650,17 +5078,17 @@ var Sandbox = class Sandbox extends Container {
|
|
|
4650
5078
|
* @param path - Path to the file to stream
|
|
4651
5079
|
* @param options - Optional session ID
|
|
4652
5080
|
*/
|
|
4653
|
-
async readFileStream(path, options = {}) {
|
|
5081
|
+
async readFileStream(path$1, options = {}) {
|
|
4654
5082
|
const session = options.sessionId ?? await this.ensureDefaultSession();
|
|
4655
|
-
return this.client.files.readFileStream(path, session);
|
|
5083
|
+
return this.client.files.readFileStream(path$1, session);
|
|
4656
5084
|
}
|
|
4657
|
-
async listFiles(path, options) {
|
|
5085
|
+
async listFiles(path$1, options) {
|
|
4658
5086
|
const session = await this.ensureDefaultSession();
|
|
4659
|
-
return this.client.files.listFiles(path, session, options);
|
|
5087
|
+
return this.client.files.listFiles(path$1, session, options);
|
|
4660
5088
|
}
|
|
4661
|
-
async exists(path, sessionId) {
|
|
5089
|
+
async exists(path$1, sessionId) {
|
|
4662
5090
|
const session = sessionId ?? await this.ensureDefaultSession();
|
|
4663
|
-
return this.client.files.exists(path, session);
|
|
5091
|
+
return this.client.files.exists(path$1, session);
|
|
4664
5092
|
}
|
|
4665
5093
|
/**
|
|
4666
5094
|
* Get the noVNC preview URL for browser-based desktop viewing.
|
|
@@ -4709,10 +5137,10 @@ var Sandbox = class Sandbox extends Container {
|
|
|
4709
5137
|
* @param path - Path to watch (absolute or relative to /workspace)
|
|
4710
5138
|
* @param options - Watch options
|
|
4711
5139
|
*/
|
|
4712
|
-
async watch(path, options = {}) {
|
|
5140
|
+
async watch(path$1, options = {}) {
|
|
4713
5141
|
const sessionId = options.sessionId ?? await this.ensureDefaultSession();
|
|
4714
5142
|
return this.client.watch.watch({
|
|
4715
|
-
path,
|
|
5143
|
+
path: path$1,
|
|
4716
5144
|
recursive: options.recursive,
|
|
4717
5145
|
include: options.include,
|
|
4718
5146
|
exclude: options.exclude,
|
|
@@ -4921,28 +5349,28 @@ var Sandbox = class Sandbox extends Container {
|
|
|
4921
5349
|
cleanupCompletedProcesses: () => this.cleanupCompletedProcesses(),
|
|
4922
5350
|
getProcessLogs: (id) => this.getProcessLogs(id),
|
|
4923
5351
|
streamProcessLogs: (processId, options) => this.streamProcessLogs(processId, options),
|
|
4924
|
-
writeFile: (path, content, options) => this.writeFile(path, content, {
|
|
5352
|
+
writeFile: (path$1, content, options) => this.writeFile(path$1, content, {
|
|
4925
5353
|
...options,
|
|
4926
5354
|
sessionId
|
|
4927
5355
|
}),
|
|
4928
|
-
readFile: (path, options) => this.readFile(path, {
|
|
5356
|
+
readFile: (path$1, options) => this.readFile(path$1, {
|
|
4929
5357
|
...options,
|
|
4930
5358
|
sessionId
|
|
4931
5359
|
}),
|
|
4932
|
-
readFileStream: (path) => this.readFileStream(path, { sessionId }),
|
|
4933
|
-
watch: (path, options) => this.watch(path, {
|
|
5360
|
+
readFileStream: (path$1) => this.readFileStream(path$1, { sessionId }),
|
|
5361
|
+
watch: (path$1, options) => this.watch(path$1, {
|
|
4934
5362
|
...options,
|
|
4935
5363
|
sessionId
|
|
4936
5364
|
}),
|
|
4937
|
-
mkdir: (path, options) => this.mkdir(path, {
|
|
5365
|
+
mkdir: (path$1, options) => this.mkdir(path$1, {
|
|
4938
5366
|
...options,
|
|
4939
5367
|
sessionId
|
|
4940
5368
|
}),
|
|
4941
|
-
deleteFile: (path) => this.deleteFile(path, sessionId),
|
|
5369
|
+
deleteFile: (path$1) => this.deleteFile(path$1, sessionId),
|
|
4942
5370
|
renameFile: (oldPath, newPath) => this.renameFile(oldPath, newPath, sessionId),
|
|
4943
5371
|
moveFile: (sourcePath, destPath) => this.moveFile(sourcePath, destPath, sessionId),
|
|
4944
|
-
listFiles: (path, options) => this.client.files.listFiles(path, sessionId, options),
|
|
4945
|
-
exists: (path) => this.exists(path, sessionId),
|
|
5372
|
+
listFiles: (path$1, options) => this.client.files.listFiles(path$1, sessionId, options),
|
|
5373
|
+
exists: (path$1) => this.exists(path$1, sessionId),
|
|
4946
5374
|
gitCheckout: (repoUrl, options) => this.gitCheckout(repoUrl, {
|
|
4947
5375
|
...options,
|
|
4948
5376
|
sessionId
|
|
@@ -5259,7 +5687,7 @@ var Sandbox = class Sandbox extends Container {
|
|
|
5259
5687
|
this.requirePresignedUrlSupport();
|
|
5260
5688
|
const DEFAULT_TTL_SECONDS = 259200;
|
|
5261
5689
|
const MAX_NAME_LENGTH = 256;
|
|
5262
|
-
const { dir, name, ttl = DEFAULT_TTL_SECONDS } = options;
|
|
5690
|
+
const { dir, name, ttl = DEFAULT_TTL_SECONDS, gitignore = false, excludes = [] } = options;
|
|
5263
5691
|
Sandbox.validateBackupDir(dir, "BackupOptions.dir");
|
|
5264
5692
|
if (name !== void 0) {
|
|
5265
5693
|
if (typeof name !== "string" || name.length > MAX_NAME_LENGTH) throw new InvalidBackupConfigError({
|
|
@@ -5284,15 +5712,31 @@ var Sandbox = class Sandbox extends Container {
|
|
|
5284
5712
|
context: { reason: "ttl must be a positive number of seconds" },
|
|
5285
5713
|
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
5286
5714
|
});
|
|
5715
|
+
if (typeof gitignore !== "boolean") throw new InvalidBackupConfigError({
|
|
5716
|
+
message: "BackupOptions.gitignore must be a boolean",
|
|
5717
|
+
code: ErrorCode.INVALID_BACKUP_CONFIG,
|
|
5718
|
+
httpStatus: 400,
|
|
5719
|
+
context: { reason: "gitignore must be a boolean" },
|
|
5720
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
5721
|
+
});
|
|
5722
|
+
if (!Array.isArray(excludes) || !excludes.every((e) => typeof e === "string")) throw new InvalidBackupConfigError({
|
|
5723
|
+
message: "BackupOptions.excludes must be an array of strings",
|
|
5724
|
+
code: ErrorCode.INVALID_BACKUP_CONFIG,
|
|
5725
|
+
httpStatus: 400,
|
|
5726
|
+
context: { reason: "excludes must be an array of strings" },
|
|
5727
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
5728
|
+
});
|
|
5287
5729
|
const backupSession = await this.ensureBackupSession();
|
|
5288
5730
|
const backupId = crypto.randomUUID();
|
|
5289
5731
|
const archivePath = `/var/backups/${backupId}.sqsh`;
|
|
5290
5732
|
this.logger.info("Creating backup", {
|
|
5291
5733
|
backupId,
|
|
5292
5734
|
dir,
|
|
5293
|
-
name
|
|
5735
|
+
name,
|
|
5736
|
+
gitignore,
|
|
5737
|
+
excludes
|
|
5294
5738
|
});
|
|
5295
|
-
const createResult = await this.client.backup.createArchive(dir, archivePath, backupSession);
|
|
5739
|
+
const createResult = await this.client.backup.createArchive(dir, archivePath, backupSession, gitignore, excludes);
|
|
5296
5740
|
if (!createResult.success) throw new BackupCreateError({
|
|
5297
5741
|
message: "Container failed to create backup archive",
|
|
5298
5742
|
code: ErrorCode.BACKUP_CREATE_FAILED,
|