@fastgpt-sdk/sandbox-adapter 0.0.34 → 0.0.36

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,12 +13,33 @@ export declare class CommandPolyfillService {
13
13
  private readonly executor;
14
14
  constructor(executor: ICommandExecution);
15
15
  /**
16
- * Read a file via base64 encoding.
17
- * Uses: cat <file> | base64
16
+ * Chunk size used when reading files through command execution. Each chunk
17
+ * produces a base64-encoded stdout of roughly `READ_CHUNK_SIZE * 4 / 3`
18
+ * bytes, which must fit within the executor's stdout byte cap (default
19
+ * 1 MiB) with room to spare.
20
+ */
21
+ private static readonly READ_CHUNK_SIZE;
22
+ /**
23
+ * Read a file in chunks via `dd | base64`. Uses `stat` to discover the file
24
+ * size, then issues range reads so that no single command's stdout exceeds
25
+ * the executor's bounded output limit.
26
+ *
27
+ * Falls back to a single `cat | base64` read when `stat` fails (e.g. when
28
+ * the sandbox lacks GNU stat). That fallback is bounded by the caller's
29
+ * `maxOutputBytes`, so very large files require stat-based chunking.
18
30
  */
19
31
  readFile(path: string): Promise<Uint8Array>;
20
32
  /**
21
- * Read a portion of a file via dd + base64.
33
+ * Return the file size in bytes, or undefined if stat fails (e.g. the file
34
+ * does not exist or stat is unavailable).
35
+ */
36
+ private statSize;
37
+ /**
38
+ * Read a portion of a file via `tail -c +N | head -c M | base64`.
39
+ *
40
+ * `tail -c +N` emits bytes starting at position N (1-indexed). `head -c M`
41
+ * caps the length. Both are POSIX-ish and avoid the `dd bs=1` one-syscall-
42
+ * per-byte trap that made the old implementation unusable for large reads.
22
43
  */
23
44
  readFileRange(path: string, start: number, end?: number): Promise<Uint8Array>;
24
45
  /**
@@ -26,6 +47,17 @@ export declare class CommandPolyfillService {
26
47
  * Uses: echo <base64> | base64 -d > <file>
27
48
  */
28
49
  writeFile(path: string, data: Uint8Array): Promise<number>;
50
+ /**
51
+ * Append `data` to `path`, chunking the base64 payload to stay under the
52
+ * shell's command-line length limit. Set `truncate` to rewrite the file
53
+ * from scratch on the first append.
54
+ *
55
+ * Parent directory creation runs once when `truncate` is set, so streaming
56
+ * writes pay the mkdir cost only on the first chunk.
57
+ */
58
+ appendBytes(path: string, data: Uint8Array, options?: {
59
+ truncate?: boolean;
60
+ }): Promise<void>;
29
61
  /**
30
62
  * Write a text file directly.
31
63
  */
@@ -12,6 +12,16 @@ export interface ExecuteOptions {
12
12
  env?: Record<string, string>;
13
13
  /** Abort signal for cancellation */
14
14
  signal?: AbortSignal;
15
+ /**
16
+ * Maximum number of bytes to retain in stdout / stderr for the returned
17
+ * {@link ExecuteResult}. Output beyond this limit is dropped (oldest first)
18
+ * and `truncated` is set to true. Streaming handlers (`onStdout` / `onStderr`)
19
+ * still receive every chunk regardless of this limit.
20
+ *
21
+ * Defaults to 1 MiB per stream. Set a larger value for commands whose full
22
+ * output you need, bearing in mind the memory cost.
23
+ */
24
+ maxOutputBytes?: number;
15
25
  }
16
26
  /**
17
27
  * Result of command execution.
@@ -0,0 +1,34 @@
1
+ /**
2
+ * Bounded, append-only text buffer that keeps only the tail when the total
3
+ * byte count exceeds `maxBytes`. The head is dropped first, because for
4
+ * command output the tail is almost always the most useful part (error
5
+ * messages, summaries, stack traces, final lines).
6
+ *
7
+ * Chunks are stored as UTF-8 `Buffer`s so byte counting is O(1) per chunk
8
+ * and slicing is exact. Decoding only happens in {@link toString}, which
9
+ * gracefully handles any broken leading multi-byte sequence created by a
10
+ * mid-character slice.
11
+ *
12
+ * Memory is O(maxBytes) regardless of how much data is appended over the
13
+ * lifetime of the buffer.
14
+ */
15
+ export declare class BoundedOutputBuffer {
16
+ private readonly maxBytes;
17
+ private chunks;
18
+ private currentBytes;
19
+ private _totalBytes;
20
+ private _truncated;
21
+ private readonly separatorBuf;
22
+ /**
23
+ * @param maxBytes Maximum retained bytes.
24
+ * @param separator Optional string inserted between consecutive appends
25
+ * (e.g. `'\n'` to replicate the old `join('\n')` behavior).
26
+ */
27
+ constructor(maxBytes: number, separator?: string);
28
+ append(text: string): void;
29
+ /** Total bytes appended over the buffer's lifetime (not just the bytes currently stored). */
30
+ get totalBytes(): number;
31
+ /** True once any data has been dropped. */
32
+ get truncated(): boolean;
33
+ toString(): string;
34
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fastgpt-sdk/sandbox-adapter",
3
- "version": "0.0.34",
3
+ "version": "0.0.36",
4
4
  "description": "Unified abstraction layer for cloud sandbox providers with adapter pattern and feature polyfilling",
5
5
  "type": "module",
6
6
  "main": "./dist/index.cjs",
@@ -48,6 +48,7 @@
48
48
  "author": "",
49
49
  "license": "MIT",
50
50
  "dependencies": {
51
+ "@alibaba-group/opensandbox": "^0.1.5",
51
52
  "@e2b/code-interpreter": "^2.3.3"
52
53
  },
53
54
  "devDependencies": {
@@ -71,7 +72,7 @@
71
72
  "prettier --check"
72
73
  ],
73
74
  "!(.claude)/**/*.{js,cjs,mjs,ts,tsx}": [
74
- "eslint --max-warnings=0 --no-warn-ignored"
75
+ "eslint --max-warnings=0"
75
76
  ]
76
77
  }
77
78
  }