@cloudflare/sandbox 0.8.8 → 0.8.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
1
- import { _ as extractRepoName, a as isExecResult, b as partitionEnvVars, c as parseSSEFrames, d as createNoOpLogger, f as TraceContext, g as GitLogger, h as ResultImpl, i as isWSStreamChunk, l as shellEscape, m as Execution, n as isWSError, o as isProcess, p as logCanonicalEvent, r as isWSResponse, s as isProcessStatus, t as generateRequestId, u as createLogger, v as filterEnvVars, y as getEnvString } from "./dist-CmfvOT-w.js";
2
- import { t as ErrorCode } from "./errors-CaSfB5Bm.js";
1
+ import { _ as GitLogger, a as isExecResult, b as getEnvString, c as parseSSEFrames, d as createNoOpLogger, f as TraceContext, g as DEFAULT_GIT_CLONE_TIMEOUT_MS, h as ResultImpl, i as isWSStreamChunk, l as shellEscape, m as Execution, n as isWSError, o as isProcess, p as logCanonicalEvent, r as isWSResponse, s as isProcessStatus, t as generateRequestId, u as createLogger, v as extractRepoName, x as partitionEnvVars, y as filterEnvVars } from "./dist-CR1a2zcN.js";
2
+ import { t as ErrorCode } from "./errors-DJtO4mmS.js";
3
3
  import { Container, ContainerProxy, getContainer, switchPort } from "@cloudflare/containers";
4
4
  import { AwsClient } from "aws4fetch";
5
5
  import path from "node:path/posix";
@@ -938,7 +938,7 @@ var WebSocketTransport = class extends BaseTransport {
938
938
  const method = options?.method || "GET";
939
939
  const body = this.parseBody(options?.body);
940
940
  const headers = this.normalizeHeaders(options?.headers);
941
- const result = await this.request(method, path$1, body, headers);
941
+ const result = await this.request(method, path$1, body, headers, options?.requestTimeoutMs);
942
942
  return new Response(JSON.stringify(result.body), {
943
943
  status: result.status,
944
944
  headers: { "Content-Type": "application/json" }
@@ -1094,7 +1094,7 @@ var WebSocketTransport = class extends BaseTransport {
1094
1094
  * the case where the WebSocket was closed between `doFetch` and `request`
1095
1095
  * (idle disconnect).
1096
1096
  */
1097
- async request(method, path$1, body, headers) {
1097
+ async request(method, path$1, body, headers, requestTimeoutMs) {
1098
1098
  await this.connect();
1099
1099
  this.clearIdleDisconnectTimer();
1100
1100
  const id = generateRequestId();
@@ -1107,7 +1107,7 @@ var WebSocketTransport = class extends BaseTransport {
1107
1107
  headers
1108
1108
  };
1109
1109
  return new Promise((resolve, reject) => {
1110
- const timeoutMs = this.config.requestTimeoutMs ?? DEFAULT_REQUEST_TIMEOUT_MS;
1110
+ const timeoutMs = requestTimeoutMs ?? this.config.requestTimeoutMs ?? DEFAULT_REQUEST_TIMEOUT_MS;
1111
1111
  const timeoutId = setTimeout(() => {
1112
1112
  this.pendingRequests.delete(id);
1113
1113
  this.scheduleIdleDisconnect();
@@ -1534,11 +1534,12 @@ var BaseHttpClient = class {
1534
1534
  /**
1535
1535
  * Make a POST request with JSON body
1536
1536
  */
1537
- async post(endpoint, data, responseHandler) {
1537
+ async post(endpoint, data, responseHandler, requestOptions) {
1538
1538
  const response = await this.doFetch(endpoint, {
1539
1539
  method: "POST",
1540
1540
  headers: { "Content-Type": "application/json" },
1541
- body: JSON.stringify(data)
1541
+ body: JSON.stringify(data),
1542
+ ...requestOptions
1542
1543
  });
1543
1544
  return this.handleResponse(response, responseHandler);
1544
1545
  }
@@ -2218,7 +2219,8 @@ var FileClient = class extends BaseHttpClient {
2218
2219
  /**
2219
2220
  * Client for Git repository operations
2220
2221
  */
2221
- var GitClient = class extends BaseHttpClient {
2222
+ var GitClient = class GitClient extends BaseHttpClient {
2223
+ static REQUEST_TIMEOUT_BUFFER_MS = 3e4;
2222
2224
  constructor(options = {}) {
2223
2225
  super(options);
2224
2226
  this.logger = new GitLogger(this.logger);
@@ -2227,10 +2229,11 @@ var GitClient = class extends BaseHttpClient {
2227
2229
  * Clone a Git repository
2228
2230
  * @param repoUrl - URL of the Git repository to clone
2229
2231
  * @param sessionId - The session ID for this operation
2230
- * @param options - Optional settings (branch, targetDir, depth)
2232
+ * @param options - Optional settings (branch, targetDir, depth, timeoutMs)
2231
2233
  */
2232
2234
  async checkout(repoUrl, sessionId, options) {
2233
2235
  try {
2236
+ const timeoutMs = options?.timeoutMs ?? DEFAULT_GIT_CLONE_TIMEOUT_MS;
2234
2237
  let targetDir = options?.targetDir;
2235
2238
  if (!targetDir) targetDir = `/workspace/${extractRepoName(repoUrl)}`;
2236
2239
  const data = {
@@ -2243,7 +2246,9 @@ var GitClient = class extends BaseHttpClient {
2243
2246
  if (!Number.isInteger(options.depth) || options.depth <= 0) throw new Error(`Invalid depth value: ${options.depth}. Must be a positive integer (e.g., 1, 5, 10).`);
2244
2247
  data.depth = options.depth;
2245
2248
  }
2246
- return await this.post("/api/git/checkout", data);
2249
+ if (!Number.isInteger(timeoutMs) || timeoutMs <= 0) throw new Error(`Invalid timeout value: ${timeoutMs}. Must be a positive integer number of milliseconds.`);
2250
+ data.timeoutMs = timeoutMs;
2251
+ return await this.post("/api/git/checkout", data, void 0, { requestTimeoutMs: timeoutMs + GitClient.REQUEST_TIMEOUT_BUFFER_MS });
2247
2252
  } catch (error) {
2248
2253
  throw error;
2249
2254
  }
@@ -3529,10 +3534,11 @@ function isLocalhostPattern(hostname) {
3529
3534
  //#endregion
3530
3535
  //#region src/storage-mount/errors.ts
3531
3536
  /**
3532
- * Bucket mounting error classes
3537
+ * Bucket mount and unmount error classes
3533
3538
  *
3534
- * These are SDK-side validation errors that follow the same pattern as SecurityError.
3535
- * They are thrown before any container interaction occurs.
3539
+ * Validation errors (InvalidMountConfigError, MissingCredentialsError) are thrown
3540
+ * before any container interaction. BucketUnmountError is thrown after a failed
3541
+ * fusermount call inside the container.
3536
3542
  */
3537
3543
  /**
3538
3544
  * Base error for bucket mounting operations
@@ -3555,6 +3561,15 @@ var S3FSMountError = class extends BucketMountError {
3555
3561
  }
3556
3562
  };
3557
3563
  /**
3564
+ * Thrown when fusermount -u fails to unmount a FUSE filesystem
3565
+ */
3566
+ var BucketUnmountError = class extends BucketMountError {
3567
+ constructor(message) {
3568
+ super(message, ErrorCode.BUCKET_UNMOUNT_ERROR);
3569
+ this.name = "BucketUnmountError";
3570
+ }
3571
+ };
3572
+ /**
3558
3573
  * Thrown when no credentials found in environment
3559
3574
  */
3560
3575
  var MissingCredentialsError = class extends BucketMountError {
@@ -3687,7 +3702,7 @@ function buildS3fsSource(bucket, prefix) {
3687
3702
  * This file is auto-updated by .github/changeset-version.ts during releases
3688
3703
  * DO NOT EDIT MANUALLY - Changes will be overwritten on the next version bump
3689
3704
  */
3690
- const SDK_VERSION = "0.8.8";
3705
+ const SDK_VERSION = "0.8.10";
3691
3706
 
3692
3707
  //#endregion
3693
3708
  //#region src/sandbox.ts
@@ -4229,9 +4244,26 @@ var Sandbox = class Sandbox extends Container {
4229
4244
  mountInfo.mounted = false;
4230
4245
  this.activeMounts.delete(mountPath);
4231
4246
  } else try {
4232
- await this.execInternal(`fusermount -u ${shellEscape(mountPath)}`);
4247
+ const result = await this.execInternal(`fusermount -u ${shellEscape(mountPath)}`);
4248
+ if (result.exitCode !== 0) {
4249
+ const stderr = result.stderr || "unknown error";
4250
+ throw new BucketUnmountError(`fusermount -u failed (exit ${result.exitCode}): ${stderr}`);
4251
+ }
4233
4252
  mountInfo.mounted = false;
4234
4253
  this.activeMounts.delete(mountPath);
4254
+ try {
4255
+ const cleanup = await this.execInternal(`mountpoint -q ${shellEscape(mountPath)} || rmdir ${shellEscape(mountPath)}`);
4256
+ if (cleanup.exitCode !== 0) this.logger.warn("mount directory removal failed", {
4257
+ mountPath,
4258
+ exitCode: cleanup.exitCode,
4259
+ stderr: cleanup.stderr
4260
+ });
4261
+ } catch (err) {
4262
+ this.logger.warn("mount directory removal failed", {
4263
+ mountPath,
4264
+ error: err instanceof Error ? err.message : String(err)
4265
+ });
4266
+ }
4235
4267
  } finally {
4236
4268
  await this.deletePasswordFile(mountInfo.passwordFilePath);
4237
4269
  }
@@ -4295,7 +4327,7 @@ var Sandbox = class Sandbox extends Container {
4295
4327
  /**
4296
4328
  * Execute S3FS mount command
4297
4329
  */
4298
- async executeS3FSMount(bucket, mountPath, options, provider, passwordFilePath) {
4330
+ async executeS3FSMount(bucket, mountPath, options, provider, passwordFilePath, sessionId) {
4299
4331
  const resolvedOptions = resolveS3fsOptions(provider, options.s3fsOptions);
4300
4332
  const s3fsArgs = [];
4301
4333
  s3fsArgs.push(`passwd_file=${passwordFilePath}`);
@@ -4304,7 +4336,7 @@ var Sandbox = class Sandbox extends Container {
4304
4336
  s3fsArgs.push(`url=${options.endpoint}`);
4305
4337
  const optionsStr = shellEscape(s3fsArgs.join(","));
4306
4338
  const mountCmd = `s3fs ${shellEscape(bucket)} ${shellEscape(mountPath)} -o ${optionsStr}`;
4307
- const result = await this.execInternal(mountCmd);
4339
+ const result = sessionId ? await this.execWithSession(mountCmd, sessionId, { origin: "internal" }) : await this.execInternal(mountCmd);
4308
4340
  if (result.exitCode !== 0) throw new S3FSMountError(`S3FS mount failed: ${result.stderr || result.stdout || "Unknown error"}`);
4309
4341
  }
4310
4342
  /**
@@ -5171,7 +5203,8 @@ var Sandbox = class Sandbox extends Container {
5171
5203
  return this.client.git.checkout(repoUrl, session, {
5172
5204
  branch: options?.branch,
5173
5205
  targetDir: options?.targetDir,
5174
- depth: options?.depth
5206
+ depth: options?.depth,
5207
+ timeoutMs: options?.cloneTimeoutMs
5175
5208
  });
5176
5209
  }
5177
5210
  async mkdir(path$1, options = {}) {
@@ -5770,74 +5803,54 @@ var Sandbox = class Sandbox extends Container {
5770
5803
  }
5771
5804
  }
5772
5805
  /**
5773
- * Download a backup archive via presigned GET URL.
5774
- * The container curls the archive directly from R2, bypassing the DO.
5775
- * ~93 MB/s throughput vs ~0.6 MB/s for base64 writeFile.
5806
+ * Mount a backup archive from R2 via s3fs so squashfuse can read it lazily.
5776
5807
  */
5777
- async downloadBackupPresigned(archivePath, r2Key, expectedSize, backupId, dir, backupSession) {
5778
- const presignedUrl = await this.generatePresignedGetUrl(r2Key);
5779
- await this.execWithSession("mkdir -p /var/backups", backupSession, { origin: "internal" });
5780
- const tmpPath = `${archivePath}.tmp`;
5781
- const curlCmd = [
5782
- "curl -sSf",
5783
- "--connect-timeout 10",
5784
- "--max-time 1800",
5785
- "--retry 2",
5786
- "--retry-max-time 60",
5787
- `-o ${shellEscape(tmpPath)}`,
5788
- shellEscape(presignedUrl)
5789
- ].join(" ");
5790
- const result = await this.execWithSession(curlCmd, backupSession, {
5791
- timeout: 181e4,
5792
- origin: "internal"
5808
+ async mountBackupR2(mountPath, prefix, backupSession) {
5809
+ const { accountId, bucketName } = this.requirePresignedUrlSupport();
5810
+ const endpoint = `https://${accountId}.r2.cloudflarestorage.com`;
5811
+ const normalizedPrefix = prefix.startsWith("/") ? prefix : `/${prefix}`;
5812
+ const options = {
5813
+ endpoint,
5814
+ provider: "r2",
5815
+ readOnly: true,
5816
+ prefix: normalizedPrefix,
5817
+ s3fsOptions: ["use_path_request_style"]
5818
+ };
5819
+ const passwordFilePath = this.generatePasswordFilePath();
5820
+ const s3fsSource = buildS3fsSource(bucketName, normalizedPrefix);
5821
+ const envObj = this.env;
5822
+ const credentials = detectCredentials(options, {
5823
+ AWS_ACCESS_KEY_ID: getEnvString(envObj, "AWS_ACCESS_KEY_ID"),
5824
+ AWS_SECRET_ACCESS_KEY: getEnvString(envObj, "AWS_SECRET_ACCESS_KEY"),
5825
+ R2_ACCESS_KEY_ID: this.r2AccessKeyId || void 0,
5826
+ R2_SECRET_ACCESS_KEY: this.r2SecretAccessKey || void 0,
5827
+ ...this.envVars
5793
5828
  });
5794
- if (result.exitCode !== 0) {
5795
- await this.execWithSession(`rm -f ${shellEscape(tmpPath)}`, backupSession, { origin: "internal" }).catch(() => {});
5796
- throw new BackupRestoreError({
5797
- message: `Presigned URL download failed (exit code ${result.exitCode}): ${result.stderr}`,
5798
- code: ErrorCode.BACKUP_RESTORE_FAILED,
5799
- httpStatus: 500,
5800
- context: {
5801
- dir,
5802
- backupId
5803
- },
5804
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
5805
- });
5806
- }
5807
- const sizeCheck = await this.execWithSession(`stat -c %s ${shellEscape(tmpPath)}`, backupSession, { origin: "internal" });
5808
- const actualSize = parseInt(sizeCheck.stdout.trim(), 10);
5809
- if (actualSize !== expectedSize) {
5810
- await this.execWithSession(`rm -f ${shellEscape(tmpPath)}`, backupSession, { origin: "internal" }).catch(() => {});
5811
- throw new BackupRestoreError({
5812
- message: `Downloaded archive size mismatch: expected ${expectedSize}, got ${actualSize}`,
5813
- code: ErrorCode.BACKUP_RESTORE_FAILED,
5814
- httpStatus: 500,
5815
- context: {
5816
- dir,
5817
- backupId
5818
- },
5819
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
5820
- });
5821
- }
5822
- const mvResult = await this.execWithSession(`mv ${shellEscape(tmpPath)} ${shellEscape(archivePath)}`, backupSession, { origin: "internal" });
5823
- if (mvResult.exitCode !== 0) {
5824
- await this.execWithSession(`rm -f ${shellEscape(tmpPath)}`, backupSession, { origin: "internal" }).catch(() => {});
5825
- throw new BackupRestoreError({
5826
- message: `Failed to finalize downloaded archive: ${mvResult.stderr}`,
5827
- code: ErrorCode.BACKUP_RESTORE_FAILED,
5828
- httpStatus: 500,
5829
- context: {
5830
- dir,
5831
- backupId
5832
- },
5833
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
5834
- });
5829
+ const mountInfo = {
5830
+ mountType: "fuse",
5831
+ bucket: s3fsSource,
5832
+ mountPath,
5833
+ endpoint,
5834
+ provider: "r2",
5835
+ passwordFilePath,
5836
+ mounted: false
5837
+ };
5838
+ this.activeMounts.set(mountPath, mountInfo);
5839
+ try {
5840
+ await this.createPasswordFile(passwordFilePath, bucketName, credentials);
5841
+ await this.execWithSession(`mkdir -p ${shellEscape(mountPath)}`, backupSession, { origin: "internal" });
5842
+ await this.executeS3FSMount(s3fsSource, mountPath, options, "r2", passwordFilePath, backupSession);
5843
+ mountInfo.mounted = true;
5844
+ } catch (error) {
5845
+ await this.deletePasswordFile(passwordFilePath);
5846
+ this.activeMounts.delete(mountPath);
5847
+ throw error;
5835
5848
  }
5836
5849
  }
5837
5850
  /**
5838
5851
  * Serialize backup operations on this sandbox instance.
5839
5852
  * Concurrent backup/restore calls are queued so the multi-step
5840
- * create-archive → read → upload (or download → write → extract) flow
5853
+ * create-archive → read → upload (or mount → extract) flow
5841
5854
  * is not interleaved with another backup operation on the same directory.
5842
5855
  */
5843
5856
  enqueueBackupOp(fn) {
@@ -5984,7 +5997,7 @@ var Sandbox = class Sandbox extends Container {
5984
5997
  *
5985
5998
  * Flow:
5986
5999
  * 1. DO reads metadata from R2 and checks TTL
5987
- * 2. Container downloads the archive directly from R2 via presigned URL
6000
+ * 2. Container mounts the backup archive from R2 via s3fs
5988
6001
  * 3. Container mounts the squashfs archive with FUSE overlayfs
5989
6002
  *
5990
6003
  * The target directory becomes an overlay mount with the backup as a
@@ -6068,8 +6081,7 @@ var Sandbox = class Sandbox extends Container {
6068
6081
  timestamp: (/* @__PURE__ */ new Date()).toISOString()
6069
6082
  });
6070
6083
  const r2Key = `backups/${id}/data.sqsh`;
6071
- const archiveHead = await bucket.head(r2Key);
6072
- if (!archiveHead) throw new BackupNotFoundError({
6084
+ if (!await bucket.head(r2Key)) throw new BackupNotFoundError({
6073
6085
  message: `Backup archive not found in R2: ${id}. The archive may have been deleted by R2 lifecycle rules.`,
6074
6086
  code: ErrorCode.BACKUP_NOT_FOUND,
6075
6087
  httpStatus: 404,
@@ -6077,12 +6089,19 @@ var Sandbox = class Sandbox extends Container {
6077
6089
  timestamp: (/* @__PURE__ */ new Date()).toISOString()
6078
6090
  });
6079
6091
  backupSession = await this.ensureBackupSession();
6080
- const archivePath = `/var/backups/${id}.sqsh`;
6081
- const mountGlob = `/var/backups/mounts/${id}`;
6092
+ const r2MountPath = `/var/backups/r2mount/${id}`;
6093
+ const archivePath = `${r2MountPath}/data.sqsh`;
6094
+ const mountGlob = `/var/backups/mounts/r2mount/${id}/data`;
6082
6095
  await this.execWithSession(`/usr/bin/fusermount3 -uz ${shellEscape(dir)} 2>/dev/null || true`, backupSession, { origin: "internal" }).catch(() => {});
6083
6096
  await this.execWithSession(`for d in ${shellEscape(mountGlob)}_*/lower ${shellEscape(mountGlob)}/lower; do [ -d "$d" ] && /usr/bin/fusermount3 -uz "$d" 2>/dev/null; done; true`, backupSession, { origin: "internal" }).catch(() => {});
6084
- const sizeCheck = await this.execWithSession(`stat -c %s ${shellEscape(archivePath)} 2>/dev/null || echo 0`, backupSession, { origin: "internal" }).catch(() => ({ stdout: "0" }));
6085
- if (Number.parseInt((sizeCheck.stdout ?? "0").trim(), 10) !== archiveHead.size) await this.downloadBackupPresigned(archivePath, r2Key, archiveHead.size, id, dir, backupSession);
6097
+ await this.execWithSession(`/usr/bin/fusermount3 -u ${shellEscape(r2MountPath)} 2>/dev/null; /usr/bin/fusermount3 -uz ${shellEscape(r2MountPath)} 2>/dev/null; true`, backupSession, { origin: "internal" }).catch(() => {});
6098
+ const previousBackupMount = this.activeMounts.get(r2MountPath);
6099
+ if (previousBackupMount?.mountType === "fuse") {
6100
+ previousBackupMount.mounted = false;
6101
+ this.activeMounts.delete(r2MountPath);
6102
+ await this.deletePasswordFile(previousBackupMount.passwordFilePath);
6103
+ }
6104
+ await this.mountBackupR2(r2MountPath, `backups/${id}/`, backupSession);
6086
6105
  if (!(await this.client.backup.restoreArchive(dir, archivePath, backupSession)).success) throw new BackupRestoreError({
6087
6106
  message: "Container failed to restore backup archive",
6088
6107
  code: ErrorCode.BACKUP_RESTORE_FAILED,
@@ -6101,10 +6120,6 @@ var Sandbox = class Sandbox extends Container {
6101
6120
  };
6102
6121
  } catch (error) {
6103
6122
  caughtError = error instanceof Error ? error : new Error(String(error));
6104
- if (id && backupSession) {
6105
- const archivePath = `/var/backups/${id}.sqsh`;
6106
- await this.execWithSession(`rm -f ${shellEscape(archivePath)}`, backupSession, { origin: "internal" }).catch(() => {});
6107
- }
6108
6123
  throw error;
6109
6124
  } finally {
6110
6125
  if (backupSession) await this.client.utils.deleteSession(backupSession).catch(() => {});
@@ -6243,5 +6258,5 @@ async function collectFile(stream) {
6243
6258
  }
6244
6259
 
6245
6260
  //#endregion
6246
- export { BackupClient, BackupCreateError, BackupExpiredError, BackupNotFoundError, BackupRestoreError, BucketMountError, CodeInterpreter, CommandClient, ContainerProxy, DesktopClient, DesktopInvalidCoordinatesError, DesktopInvalidOptionsError, DesktopNotStartedError, DesktopProcessCrashedError, DesktopStartFailedError, DesktopUnavailableError, FileClient, GitClient, InvalidBackupConfigError, InvalidMountConfigError, MissingCredentialsError, PortClient, ProcessClient, ProcessExitedBeforeReadyError, ProcessReadyTimeoutError, S3FSMountError, Sandbox, SandboxClient, UtilityClient, asyncIterableToSSEStream, collectFile, getSandbox, isExecResult, isProcess, isProcessStatus, parseSSEStream, proxyTerminal, proxyToSandbox, responseToAsyncIterable, streamFile };
6261
+ export { BackupClient, BackupCreateError, BackupExpiredError, BackupNotFoundError, BackupRestoreError, BucketMountError, BucketUnmountError, CodeInterpreter, CommandClient, ContainerProxy, DesktopClient, DesktopInvalidCoordinatesError, DesktopInvalidOptionsError, DesktopNotStartedError, DesktopProcessCrashedError, DesktopStartFailedError, DesktopUnavailableError, FileClient, GitClient, InvalidBackupConfigError, InvalidMountConfigError, MissingCredentialsError, PortClient, ProcessClient, ProcessExitedBeforeReadyError, ProcessReadyTimeoutError, S3FSMountError, Sandbox, SandboxClient, UtilityClient, asyncIterableToSSEStream, collectFile, getSandbox, isExecResult, isProcess, isProcessStatus, parseSSEStream, proxyTerminal, proxyToSandbox, responseToAsyncIterable, streamFile };
6247
6262
  //# sourceMappingURL=index.js.map