@scelar/nodepod 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +43 -0
- package/README.md +240 -0
- package/dist/child_process-BJOMsZje.js +8233 -0
- package/dist/child_process-BJOMsZje.js.map +1 -0
- package/dist/child_process-Cj8vOcuc.cjs +7434 -0
- package/dist/child_process-Cj8vOcuc.cjs.map +1 -0
- package/dist/index-Cb1Cgdnd.js +35308 -0
- package/dist/index-Cb1Cgdnd.js.map +1 -0
- package/dist/index-DsMGS-xc.cjs +37195 -0
- package/dist/index-DsMGS-xc.cjs.map +1 -0
- package/dist/index.cjs +65 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.mjs +59 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +95 -0
- package/src/__tests__/smoke.test.ts +11 -0
- package/src/constants/cdn-urls.ts +18 -0
- package/src/constants/config.ts +236 -0
- package/src/cross-origin.ts +26 -0
- package/src/engine-factory.ts +176 -0
- package/src/engine-types.ts +56 -0
- package/src/helpers/byte-encoding.ts +39 -0
- package/src/helpers/digest.ts +9 -0
- package/src/helpers/event-loop.ts +96 -0
- package/src/helpers/wasm-cache.ts +133 -0
- package/src/iframe-sandbox.ts +141 -0
- package/src/index.ts +192 -0
- package/src/isolation-helpers.ts +148 -0
- package/src/memory-volume.ts +941 -0
- package/src/module-transformer.ts +368 -0
- package/src/packages/archive-extractor.ts +248 -0
- package/src/packages/browser-bundler.ts +284 -0
- package/src/packages/installer.ts +396 -0
- package/src/packages/registry-client.ts +131 -0
- package/src/packages/version-resolver.ts +411 -0
- package/src/polyfills/assert.ts +384 -0
- package/src/polyfills/async_hooks.ts +144 -0
- package/src/polyfills/buffer.ts +628 -0
- package/src/polyfills/child_process.ts +2288 -0
- package/src/polyfills/chokidar.ts +336 -0
- package/src/polyfills/cluster.ts +106 -0
- package/src/polyfills/console.ts +136 -0
- package/src/polyfills/constants.ts +123 -0
- package/src/polyfills/crypto.ts +885 -0
- package/src/polyfills/dgram.ts +87 -0
- package/src/polyfills/diagnostics_channel.ts +76 -0
- package/src/polyfills/dns.ts +134 -0
- package/src/polyfills/domain.ts +68 -0
- package/src/polyfills/esbuild.ts +854 -0
- package/src/polyfills/events.ts +276 -0
- package/src/polyfills/fs.ts +2888 -0
- package/src/polyfills/fsevents.ts +79 -0
- package/src/polyfills/http.ts +1449 -0
- package/src/polyfills/http2.ts +199 -0
- package/src/polyfills/https.ts +76 -0
- package/src/polyfills/inspector.ts +62 -0
- package/src/polyfills/lightningcss.ts +105 -0
- package/src/polyfills/module.ts +191 -0
- package/src/polyfills/net.ts +353 -0
- package/src/polyfills/os.ts +238 -0
- package/src/polyfills/path.ts +206 -0
- package/src/polyfills/perf_hooks.ts +102 -0
- package/src/polyfills/process.ts +690 -0
- package/src/polyfills/punycode.ts +159 -0
- package/src/polyfills/querystring.ts +93 -0
- package/src/polyfills/quic.ts +118 -0
- package/src/polyfills/readdirp.ts +229 -0
- package/src/polyfills/readline.ts +692 -0
- package/src/polyfills/repl.ts +134 -0
- package/src/polyfills/rollup.ts +119 -0
- package/src/polyfills/sea.ts +33 -0
- package/src/polyfills/sqlite.ts +78 -0
- package/src/polyfills/stream.ts +1620 -0
- package/src/polyfills/string_decoder.ts +25 -0
- package/src/polyfills/tailwindcss-oxide.ts +309 -0
- package/src/polyfills/test.ts +197 -0
- package/src/polyfills/timers.ts +32 -0
- package/src/polyfills/tls.ts +105 -0
- package/src/polyfills/trace_events.ts +50 -0
- package/src/polyfills/tty.ts +71 -0
- package/src/polyfills/url.ts +174 -0
- package/src/polyfills/util.ts +559 -0
- package/src/polyfills/v8.ts +126 -0
- package/src/polyfills/vm.ts +132 -0
- package/src/polyfills/volume-registry.ts +15 -0
- package/src/polyfills/wasi.ts +44 -0
- package/src/polyfills/worker_threads.ts +326 -0
- package/src/polyfills/ws.ts +595 -0
- package/src/polyfills/zlib.ts +881 -0
- package/src/request-proxy.ts +716 -0
- package/src/script-engine.ts +3375 -0
- package/src/sdk/nodepod-fs.ts +93 -0
- package/src/sdk/nodepod-process.ts +86 -0
- package/src/sdk/nodepod-terminal.ts +350 -0
- package/src/sdk/nodepod.ts +509 -0
- package/src/sdk/types.ts +70 -0
- package/src/shell/commands/bun.ts +121 -0
- package/src/shell/commands/directory.ts +297 -0
- package/src/shell/commands/file-ops.ts +525 -0
- package/src/shell/commands/git.ts +2142 -0
- package/src/shell/commands/node.ts +80 -0
- package/src/shell/commands/npm.ts +198 -0
- package/src/shell/commands/pm-types.ts +45 -0
- package/src/shell/commands/pnpm.ts +82 -0
- package/src/shell/commands/search.ts +264 -0
- package/src/shell/commands/shell-env.ts +352 -0
- package/src/shell/commands/text-processing.ts +1152 -0
- package/src/shell/commands/yarn.ts +84 -0
- package/src/shell/shell-builtins.ts +19 -0
- package/src/shell/shell-helpers.ts +250 -0
- package/src/shell/shell-interpreter.ts +514 -0
- package/src/shell/shell-parser.ts +429 -0
- package/src/shell/shell-types.ts +85 -0
- package/src/syntax-transforms.ts +561 -0
- package/src/threading/engine-worker.ts +64 -0
- package/src/threading/inline-worker.ts +372 -0
- package/src/threading/offload-types.ts +112 -0
- package/src/threading/offload-worker.ts +383 -0
- package/src/threading/offload.ts +271 -0
- package/src/threading/process-context.ts +92 -0
- package/src/threading/process-handle.ts +275 -0
- package/src/threading/process-manager.ts +956 -0
- package/src/threading/process-worker-entry.ts +854 -0
- package/src/threading/shared-vfs.ts +352 -0
- package/src/threading/sync-channel.ts +135 -0
- package/src/threading/task-queue.ts +177 -0
- package/src/threading/vfs-bridge.ts +231 -0
- package/src/threading/worker-pool.ts +233 -0
- package/src/threading/worker-protocol.ts +358 -0
- package/src/threading/worker-vfs.ts +218 -0
- package/src/types/externals.d.ts +38 -0
- package/src/types/fs-streams.ts +142 -0
- package/src/types/manifest.ts +17 -0
- package/src/worker-sandbox.ts +90 -0
|
@@ -0,0 +1,352 @@
|
|
|
1
|
+
// SharedVFS — SharedArrayBuffer-backed VFS for synchronous cross-thread reads.
|
|
2
|
+
//
|
|
3
|
+
// When SAB is available (requires COOP/COEP headers), workers can do
|
|
4
|
+
// fs.readFileSync() directly from shared memory without async IPC.
|
|
5
|
+
// Falls back to snapshot-based approach when SAB is unavailable.
|
|
6
|
+
//
|
|
7
|
+
// Layout: file table (paths -> offset+length) + data region, all in one SAB.
|
|
8
|
+
// Main thread owns the canonical table; workers read from it.
|
|
9
|
+
|
|
10
|
+
/* ------------------------------------------------------------------ */
|
|
11
|
+
/* Feature detection */
|
|
12
|
+
/* ------------------------------------------------------------------ */
|
|
13
|
+
export function isSharedArrayBufferAvailable(): boolean {
|
|
14
|
+
try {
|
|
15
|
+
return (
|
|
16
|
+
typeof SharedArrayBuffer !== 'undefined' &&
|
|
17
|
+
typeof Atomics !== 'undefined'
|
|
18
|
+
);
|
|
19
|
+
} catch {
|
|
20
|
+
return false;
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/* ------------------------------------------------------------------ */
/* Shared memory layout */
/* ------------------------------------------------------------------ */

// Per entry, 264 bytes:
//   [0..3]   flags          [4..7]  contentOffset   [8..11] contentLength
//   [12..15] modified (s)   [16..263] null-terminated UTF-8 path
const ENTRY_SIZE = 264;
const ENTRY_FLAGS_OFFSET = 0;
const ENTRY_CONTENT_OFFSET = 4;
const ENTRY_CONTENT_LENGTH = 8;
const ENTRY_MODIFIED_OFFSET = 12;
const ENTRY_PATH_OFFSET = 16;
// Path field capacity in bytes, including the NUL terminator — writers clamp
// to ENTRY_PATH_MAX - 1 bytes of path data, so longer paths are truncated.
const ENTRY_PATH_MAX = 248;

const FLAG_ACTIVE = 1;
const FLAG_DIRECTORY = 2;
// NOTE(review): FLAG_SYMLINK is declared but never set or tested in this
// file — presumably reserved for future use; confirm before removing.
const FLAG_SYMLINK = 4;

// Header (Int32 indices): [0] version, [1] entry count, [2] data used, [3] lock
const HEADER_SIZE = 16;
const MAX_ENTRIES = 16384;
const TABLE_SIZE = MAX_ENTRIES * ENTRY_SIZE;
// Byte offset where file contents begin, immediately after the fixed table.
const DATA_OFFSET = HEADER_SIZE + TABLE_SIZE;

const DEFAULT_BUFFER_SIZE = 64 * 1024 * 1024; // 64MB
|
|
50
|
+
|
|
51
|
+
/* ------------------------------------------------------------------ */
|
|
52
|
+
/* FNV-1a hash */
|
|
53
|
+
/* ------------------------------------------------------------------ */
|
|
54
|
+
|
|
55
|
+
function fnv1a(str: string): number {
|
|
56
|
+
let hash = 0x811c9dc5;
|
|
57
|
+
for (let i = 0; i < str.length; i++) {
|
|
58
|
+
hash ^= str.charCodeAt(i);
|
|
59
|
+
hash = (hash * 0x01000193) >>> 0;
|
|
60
|
+
}
|
|
61
|
+
return hash;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/* ------------------------------------------------------------------ */
|
|
65
|
+
/* SharedVFSController (main thread) */
|
|
66
|
+
/* ------------------------------------------------------------------ */
|
|
67
|
+
|
|
68
|
+
// Main-thread controller. Owns the SAB and manages the file table.
//
// Concurrency model as visible here: only this class mutates the buffer;
// SharedVFSReader instances only read. Header Int32 indices: [0] version
// (incremented + notified on every mutation so readers can Atomics.wait on
// it), [1] entry count, [2] data used (accessed via DataView at byte offset
// 8), [3] lock word. All entry fields are read and written exclusively
// through DataView, whose default big-endian byte order is therefore used
// consistently by both controller and reader.
export class SharedVFSController {
  private _buffer: SharedArrayBuffer;
  private _view: DataView;
  private _int32: Int32Array;
  private _uint8: Uint8Array;
  private _pathEncoder = new TextEncoder();
  // NOTE(review): _pathDecoder is never used in this class — confirm before
  // removing (SharedVFSReader carries the same unused field).
  private _pathDecoder = new TextDecoder();

  // Throws when SharedArrayBuffer/Atomics are unavailable (missing COOP/COEP).
  constructor(bufferSize: number = DEFAULT_BUFFER_SIZE) {
    if (!isSharedArrayBufferAvailable()) {
      throw new Error('SharedArrayBuffer not available. Ensure COOP/COEP headers are set.');
    }

    this._buffer = new SharedArrayBuffer(bufferSize);
    this._view = new DataView(this._buffer);
    this._int32 = new Int32Array(this._buffer);
    this._uint8 = new Uint8Array(this._buffer);

    // Zero the header: version, entry count, data used, lock.
    Atomics.store(this._int32, 0, 0);
    Atomics.store(this._int32, 1, 0);
    this._view.setUint32(8, 0);
    Atomics.store(this._int32, 3, 0);
  }

  // The underlying SAB, for transfer (by reference) to workers.
  get buffer(): SharedArrayBuffer {
    return this._buffer;
  }

  // Create or overwrite a file entry. Returns false when the table is full
  // or the data region cannot hold the content. Content is appended to the
  // data region; old content is never reclaimed (append-only, see
  // _updateEntry). Paths longer than ENTRY_PATH_MAX - 1 UTF-8 bytes are
  // silently truncated, which can make distinct paths collide.
  writeFile(path: string, content: Uint8Array): boolean {
    this._lock();
    try {
      const entryCount = Atomics.load(this._int32, 1);
      const dataUsed = this._view.getUint32(8);

      const existingIdx = this._findEntry(path);
      if (existingIdx !== -1) {
        return this._updateEntry(existingIdx, content, dataUsed);
      }

      if (entryCount >= MAX_ENTRIES) return false;
      if (DATA_OFFSET + dataUsed + content.byteLength > this._buffer.byteLength) return false;

      // Copy the content into the data region first, then publish the entry.
      const contentOffset = dataUsed;
      this._uint8.set(content, DATA_OFFSET + contentOffset);

      const entryOffset = HEADER_SIZE + entryCount * ENTRY_SIZE;
      this._view.setUint32(entryOffset + ENTRY_FLAGS_OFFSET, FLAG_ACTIVE);
      this._view.setUint32(entryOffset + ENTRY_CONTENT_OFFSET, contentOffset);
      this._view.setUint32(entryOffset + ENTRY_CONTENT_LENGTH, content.byteLength);
      // Seconds since epoch, truncated to a 32-bit int.
      this._view.setUint32(entryOffset + ENTRY_MODIFIED_OFFSET, (Date.now() / 1000) | 0);

      const pathBytes = this._pathEncoder.encode(path);
      const pathLen = Math.min(pathBytes.byteLength, ENTRY_PATH_MAX - 1);
      this._uint8.set(pathBytes.subarray(0, pathLen), entryOffset + ENTRY_PATH_OFFSET);
      this._uint8[entryOffset + ENTRY_PATH_OFFSET + pathLen] = 0; // NUL terminator

      Atomics.store(this._int32, 1, entryCount + 1);
      this._view.setUint32(8, dataUsed + content.byteLength);

      // Bump the version and wake any reader blocked in waitForChange().
      Atomics.add(this._int32, 0, 1);
      Atomics.notify(this._int32, 0);

      return true;
    } finally {
      this._unlock();
    }
  }

  // Record a directory entry (no content). Idempotent: returns true if the
  // path already exists, false only when the table is full.
  writeDirectory(path: string): boolean {
    this._lock();
    try {
      const entryCount = Atomics.load(this._int32, 1);
      if (entryCount >= MAX_ENTRIES) return false;
      if (this._findEntry(path) !== -1) return true;

      const entryOffset = HEADER_SIZE + entryCount * ENTRY_SIZE;
      this._view.setUint32(entryOffset + ENTRY_FLAGS_OFFSET, FLAG_ACTIVE | FLAG_DIRECTORY);
      this._view.setUint32(entryOffset + ENTRY_CONTENT_OFFSET, 0);
      this._view.setUint32(entryOffset + ENTRY_CONTENT_LENGTH, 0);
      this._view.setUint32(entryOffset + ENTRY_MODIFIED_OFFSET, (Date.now() / 1000) | 0);

      const pathBytes = this._pathEncoder.encode(path);
      const pathLen = Math.min(pathBytes.byteLength, ENTRY_PATH_MAX - 1);
      this._uint8.set(pathBytes.subarray(0, pathLen), entryOffset + ENTRY_PATH_OFFSET);
      this._uint8[entryOffset + ENTRY_PATH_OFFSET + pathLen] = 0;

      Atomics.store(this._int32, 1, entryCount + 1);
      Atomics.add(this._int32, 0, 1);
      Atomics.notify(this._int32, 0);

      return true;
    } finally {
      this._unlock();
    }
  }

  // Deactivate an entry (file or directory). The entry slot and its data
  // bytes are not reclaimed — only the flags word is cleared.
  deleteFile(path: string): boolean {
    this._lock();
    try {
      const idx = this._findEntry(path);
      if (idx === -1) return false;

      const entryOffset = HEADER_SIZE + idx * ENTRY_SIZE;
      this._view.setUint32(entryOffset + ENTRY_FLAGS_OFFSET, 0);

      Atomics.add(this._int32, 0, 1);
      Atomics.notify(this._int32, 0);

      return true;
    } finally {
      this._unlock();
    }
  }

  // Read a file's content from the data region. Returns null for missing
  // paths, inactive entries, and directories. Does not take the lock —
  // safe only because all mutation happens on this same (main) thread.
  readFile(path: string): Uint8Array | null {
    const idx = this._findEntry(path);
    if (idx === -1) return null;

    const entryOffset = HEADER_SIZE + idx * ENTRY_SIZE;
    const flags = this._view.getUint32(entryOffset + ENTRY_FLAGS_OFFSET);
    if (!(flags & FLAG_ACTIVE) || (flags & FLAG_DIRECTORY)) return null;

    const contentOffset = this._view.getUint32(entryOffset + ENTRY_CONTENT_OFFSET);
    const contentLength = this._view.getUint32(entryOffset + ENTRY_CONTENT_LENGTH);

    // Copy, not view, to avoid races
    // NOTE(review): slice() already returns a copy into a non-shared buffer;
    // the outer new Uint8Array(...) copies a second time — likely removable.
    return new Uint8Array(this._uint8.slice(
      DATA_OFFSET + contentOffset,
      DATA_OFFSET + contentOffset + contentLength,
    ));
  }

  // True when an active entry (file or directory) exists for the path.
  exists(path: string): boolean {
    return this._findEntry(path) !== -1;
  }

  // Monotonic change counter; bumped by every mutating method.
  get version(): number {
    return Atomics.load(this._int32, 0);
  }

  /* ---- Internal ---- */

  // Linear scan of the entry table comparing raw UTF-8 bytes; an entry
  // matches when every path byte agrees and the next stored byte is NUL
  // (exact-length match). Returns the entry index or -1.
  private _findEntry(path: string): number {
    const entryCount = Atomics.load(this._int32, 1);
    const pathBytes = this._pathEncoder.encode(path);

    for (let i = 0; i < entryCount; i++) {
      const entryOffset = HEADER_SIZE + i * ENTRY_SIZE;
      const flags = this._view.getUint32(entryOffset + ENTRY_FLAGS_OFFSET);
      if (!(flags & FLAG_ACTIVE)) continue;

      let match = true;
      for (let j = 0; j < pathBytes.byteLength; j++) {
        if (this._uint8[entryOffset + ENTRY_PATH_OFFSET + j] !== pathBytes[j]) {
          match = false;
          break;
        }
      }
      if (match && this._uint8[entryOffset + ENTRY_PATH_OFFSET + pathBytes.byteLength] === 0) {
        return i;
      }
    }
    return -1;
  }

  // Overwrite an existing entry's content. New bytes are appended at the end
  // of the data region and the entry is repointed; old bytes become garbage.
  private _updateEntry(idx: number, content: Uint8Array, dataUsed: number): boolean {
    if (DATA_OFFSET + dataUsed + content.byteLength > this._buffer.byteLength) return false;

    const entryOffset = HEADER_SIZE + idx * ENTRY_SIZE;

    // Append-only — don't reuse old space to avoid races
    const contentOffset = dataUsed;
    this._uint8.set(content, DATA_OFFSET + contentOffset);

    this._view.setUint32(entryOffset + ENTRY_CONTENT_OFFSET, contentOffset);
    this._view.setUint32(entryOffset + ENTRY_CONTENT_LENGTH, content.byteLength);
    this._view.setUint32(entryOffset + ENTRY_MODIFIED_OFFSET, (Date.now() / 1000) | 0);

    this._view.setUint32(8, dataUsed + content.byteLength);
    Atomics.add(this._int32, 0, 1);
    Atomics.notify(this._int32, 0);

    return true;
  }

  // Spin-lock on header word 3 (0 = free, 1 = held).
  // NOTE(review): Atomics.wait throws on a browser main thread; it is only
  // reached when the lock is contended, and within this file only this
  // main-thread class ever takes the lock, so the wait path appears
  // unreachable today — verify if workers ever gain write access.
  private _lock(): void {
    while (Atomics.compareExchange(this._int32, 3, 0, 1) !== 0) {
      Atomics.wait(this._int32, 3, 1, 1);
    }
  }

  // Release the lock and wake any thread parked in _lock().
  private _unlock(): void {
    Atomics.store(this._int32, 3, 0);
    Atomics.notify(this._int32, 3);
  }
}
|
|
265
|
+
|
|
266
|
+
/* ------------------------------------------------------------------ */
|
|
267
|
+
/* SharedVFSReader (worker thread) */
|
|
268
|
+
/* ------------------------------------------------------------------ */
|
|
269
|
+
|
|
270
|
+
// Worker-thread reader. Synchronous read access to shared memory.
|
|
271
|
+
export class SharedVFSReader {
|
|
272
|
+
private _buffer: SharedArrayBuffer;
|
|
273
|
+
private _view: DataView;
|
|
274
|
+
private _int32: Int32Array;
|
|
275
|
+
private _uint8: Uint8Array;
|
|
276
|
+
private _pathEncoder = new TextEncoder();
|
|
277
|
+
private _pathDecoder = new TextDecoder();
|
|
278
|
+
|
|
279
|
+
constructor(buffer: SharedArrayBuffer) {
|
|
280
|
+
this._buffer = buffer;
|
|
281
|
+
this._view = new DataView(buffer);
|
|
282
|
+
this._int32 = new Int32Array(buffer);
|
|
283
|
+
this._uint8 = new Uint8Array(buffer);
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
readFileSync(path: string): Uint8Array | null {
|
|
287
|
+
const idx = this._findEntry(path);
|
|
288
|
+
if (idx === -1) return null;
|
|
289
|
+
|
|
290
|
+
const entryOffset = HEADER_SIZE + idx * ENTRY_SIZE;
|
|
291
|
+
const flags = this._view.getUint32(entryOffset + ENTRY_FLAGS_OFFSET);
|
|
292
|
+
if (!(flags & FLAG_ACTIVE) || (flags & FLAG_DIRECTORY)) return null;
|
|
293
|
+
|
|
294
|
+
const contentOffset = this._view.getUint32(entryOffset + ENTRY_CONTENT_OFFSET);
|
|
295
|
+
const contentLength = this._view.getUint32(entryOffset + ENTRY_CONTENT_LENGTH);
|
|
296
|
+
|
|
297
|
+
const result = new Uint8Array(contentLength);
|
|
298
|
+
result.set(this._uint8.subarray(
|
|
299
|
+
DATA_OFFSET + contentOffset,
|
|
300
|
+
DATA_OFFSET + contentOffset + contentLength,
|
|
301
|
+
));
|
|
302
|
+
return result;
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
existsSync(path: string): boolean {
|
|
306
|
+
return this._findEntry(path) !== -1;
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
isDirectorySync(path: string): boolean {
|
|
310
|
+
const idx = this._findEntry(path);
|
|
311
|
+
if (idx === -1) return false;
|
|
312
|
+
const entryOffset = HEADER_SIZE + idx * ENTRY_SIZE;
|
|
313
|
+
const flags = this._view.getUint32(entryOffset + ENTRY_FLAGS_OFFSET);
|
|
314
|
+
return (flags & FLAG_ACTIVE) !== 0 && (flags & FLAG_DIRECTORY) !== 0;
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
get version(): number {
|
|
318
|
+
return Atomics.load(this._int32, 0);
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
// Blocks until version changes. Returns new version or -1 on timeout.
|
|
322
|
+
waitForChange(currentVersion: number, timeoutMs: number = 5000): number {
|
|
323
|
+
const result = Atomics.wait(this._int32, 0, currentVersion, timeoutMs);
|
|
324
|
+
if (result === 'timed-out') return -1;
|
|
325
|
+
return Atomics.load(this._int32, 0);
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
/* ---- Internal ---- */
|
|
329
|
+
|
|
330
|
+
private _findEntry(path: string): number {
|
|
331
|
+
const entryCount = Atomics.load(this._int32, 1);
|
|
332
|
+
const pathBytes = this._pathEncoder.encode(path);
|
|
333
|
+
|
|
334
|
+
for (let i = 0; i < entryCount; i++) {
|
|
335
|
+
const entryOffset = HEADER_SIZE + i * ENTRY_SIZE;
|
|
336
|
+
const flags = this._view.getUint32(entryOffset + ENTRY_FLAGS_OFFSET);
|
|
337
|
+
if (!(flags & FLAG_ACTIVE)) continue;
|
|
338
|
+
|
|
339
|
+
let match = true;
|
|
340
|
+
for (let j = 0; j < pathBytes.byteLength; j++) {
|
|
341
|
+
if (this._uint8[entryOffset + ENTRY_PATH_OFFSET + j] !== pathBytes[j]) {
|
|
342
|
+
match = false;
|
|
343
|
+
break;
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
if (match && this._uint8[entryOffset + ENTRY_PATH_OFFSET + pathBytes.byteLength] === 0) {
|
|
347
|
+
return i;
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
return -1;
|
|
351
|
+
}
|
|
352
|
+
}
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
// SyncChannel — true blocking execSync/spawnSync via SharedArrayBuffer + Atomics.
|
|
2
|
+
// Worker allocates a slot, posts spawn-sync, blocks on Atomics.wait().
|
|
3
|
+
// Main thread runs the child, writes result to the slot, calls Atomics.notify().
|
|
4
|
+
|
|
5
|
+
import { isSharedArrayBufferAvailable } from "./shared-vfs";
|
|
6
|
+
|
|
7
|
+
// --- Shared memory layout ---
//
// Per-slot (16KB = 4096 Int32s):
//   [0]       status (0=pending, 1=complete, 2=error)
//   [1]       exit code
//   [2]       stdout byte length
//   [3..4095] stdout data (raw UTF-8 bytes)
//
// Last Int32 at index MAX_SLOTS * SLOT_SIZE is the atomic allocation counter,
// hence the extra 4 bytes in DEFAULT_SYNC_BUFFER_SIZE.
export const SLOT_SIZE = 4096; // 4096 Int32 values = 16KB per slot
export const MAX_SLOTS = 64;
const STATUS_PENDING = 0;
const STATUS_COMPLETE = 1;
const STATUS_ERROR = 2;

const DEFAULT_SYNC_BUFFER_SIZE = MAX_SLOTS * SLOT_SIZE * 4 + 4; // ~1MB
// Int32 index (not byte offset) of the slot-allocation counter.
const COUNTER_INDEX = MAX_SLOTS * SLOT_SIZE;
|
|
24
|
+
|
|
25
|
+
// --- SyncChannelController (main thread) ---
|
|
26
|
+
export class SyncChannelController {
|
|
27
|
+
private _buffer: SharedArrayBuffer;
|
|
28
|
+
private _int32: Int32Array;
|
|
29
|
+
private _uint8: Uint8Array;
|
|
30
|
+
|
|
31
|
+
constructor(bufferSize: number = DEFAULT_SYNC_BUFFER_SIZE) {
|
|
32
|
+
if (!isSharedArrayBufferAvailable()) {
|
|
33
|
+
throw new Error("SharedArrayBuffer not available. Ensure COOP/COEP headers are set.");
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
this._buffer = new SharedArrayBuffer(bufferSize);
|
|
37
|
+
this._int32 = new Int32Array(this._buffer);
|
|
38
|
+
this._uint8 = new Uint8Array(this._buffer);
|
|
39
|
+
|
|
40
|
+
for (let i = 0; i < MAX_SLOTS; i++) {
|
|
41
|
+
Atomics.store(this._int32, i * SLOT_SIZE, STATUS_PENDING);
|
|
42
|
+
}
|
|
43
|
+
Atomics.store(this._int32, COUNTER_INDEX, 0);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
get buffer(): SharedArrayBuffer {
|
|
47
|
+
return this._buffer;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
writeResult(syncSlot: number, exitCode: number, stdout: string): void {
|
|
51
|
+
const base = syncSlot * SLOT_SIZE;
|
|
52
|
+
|
|
53
|
+
Atomics.store(this._int32, base + 1, exitCode);
|
|
54
|
+
|
|
55
|
+
const encoder = new TextEncoder();
|
|
56
|
+
const stdoutBytes = encoder.encode(stdout);
|
|
57
|
+
const maxStdoutLen = (SLOT_SIZE - 3) * 4;
|
|
58
|
+
const truncatedLen = Math.min(stdoutBytes.byteLength, maxStdoutLen);
|
|
59
|
+
|
|
60
|
+
Atomics.store(this._int32, base + 2, truncatedLen);
|
|
61
|
+
|
|
62
|
+
const dataOffset = (base + 3) * 4;
|
|
63
|
+
this._uint8.set(stdoutBytes.subarray(0, truncatedLen), dataOffset);
|
|
64
|
+
|
|
65
|
+
// Must be last — wakes the waiting worker
|
|
66
|
+
Atomics.store(this._int32, base, STATUS_COMPLETE);
|
|
67
|
+
Atomics.notify(this._int32, base);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
writeError(syncSlot: number, exitCode: number, errorMessage: string): void {
|
|
71
|
+
const base = syncSlot * SLOT_SIZE;
|
|
72
|
+
|
|
73
|
+
Atomics.store(this._int32, base + 1, exitCode);
|
|
74
|
+
|
|
75
|
+
const encoder = new TextEncoder();
|
|
76
|
+
const errorBytes = encoder.encode(errorMessage);
|
|
77
|
+
const maxLen = (SLOT_SIZE - 3) * 4;
|
|
78
|
+
const truncatedLen = Math.min(errorBytes.byteLength, maxLen);
|
|
79
|
+
|
|
80
|
+
Atomics.store(this._int32, base + 2, truncatedLen);
|
|
81
|
+
const dataOffset = (base + 3) * 4;
|
|
82
|
+
this._uint8.set(errorBytes.subarray(0, truncatedLen), dataOffset);
|
|
83
|
+
|
|
84
|
+
Atomics.store(this._int32, base, STATUS_ERROR);
|
|
85
|
+
Atomics.notify(this._int32, base);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// --- SyncChannelWorker (worker thread) ---
|
|
90
|
+
export class SyncChannelWorker {
|
|
91
|
+
private _int32: Int32Array;
|
|
92
|
+
private _uint8: Uint8Array;
|
|
93
|
+
|
|
94
|
+
constructor(buffer: SharedArrayBuffer) {
|
|
95
|
+
this._int32 = new Int32Array(buffer);
|
|
96
|
+
this._uint8 = new Uint8Array(buffer);
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
// Atomic counter prevents slot collisions across workers
|
|
100
|
+
allocateSlot(): number {
|
|
101
|
+
const raw = Atomics.add(this._int32, COUNTER_INDEX, 1);
|
|
102
|
+
const slot = raw % MAX_SLOTS;
|
|
103
|
+
Atomics.store(this._int32, slot * SLOT_SIZE, STATUS_PENDING);
|
|
104
|
+
return slot;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// Blocks the worker thread until main writes the result
|
|
108
|
+
waitForResult(syncSlot: number, timeoutMs: number = 120_000): { exitCode: number; stdout: string } {
|
|
109
|
+
const base = syncSlot * SLOT_SIZE;
|
|
110
|
+
|
|
111
|
+
const result = Atomics.wait(this._int32, base, STATUS_PENDING, timeoutMs);
|
|
112
|
+
|
|
113
|
+
if (result === "timed-out") {
|
|
114
|
+
throw new Error("execSync timed out");
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
const status = Atomics.load(this._int32, base);
|
|
118
|
+
const exitCode = Atomics.load(this._int32, base + 1);
|
|
119
|
+
const stdoutLen = Atomics.load(this._int32, base + 2);
|
|
120
|
+
|
|
121
|
+
const decoder = new TextDecoder();
|
|
122
|
+
const dataOffset = (base + 3) * 4;
|
|
123
|
+
const stdoutBytes = this._uint8.slice(dataOffset, dataOffset + stdoutLen);
|
|
124
|
+
const stdout = decoder.decode(stdoutBytes);
|
|
125
|
+
|
|
126
|
+
if (status === STATUS_ERROR) {
|
|
127
|
+
const err = new Error(`Command failed with exit code ${exitCode}\n${stdout}`);
|
|
128
|
+
(err as any).status = exitCode;
|
|
129
|
+
(err as any).stdout = stdout;
|
|
130
|
+
throw err;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
return { exitCode, stdout };
|
|
134
|
+
}
|
|
135
|
+
}
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
// TaskQueue — priority-sorted queue that dispatches tasks to the WorkerPool.
|
|
2
|
+
|
|
3
|
+
import type { WorkerPool } from "./worker-pool";
|
|
4
|
+
import type {
|
|
5
|
+
OffloadTask,
|
|
6
|
+
OffloadResult,
|
|
7
|
+
TransformTask,
|
|
8
|
+
TransformResult,
|
|
9
|
+
ExtractTask,
|
|
10
|
+
ExtractResult,
|
|
11
|
+
BuildTask,
|
|
12
|
+
BuildResult,
|
|
13
|
+
} from "./offload-types";
|
|
14
|
+
|
|
15
|
+
// --- Internal types ---

// A task waiting in the queue, paired with the settle callbacks of the
// promise returned by submit().
interface QueuedTask {
  task: OffloadTask;
  // `any` because this field stores the resolver of submit()'s
  // conditionally-typed promise; the concrete result type depends on
  // task.type and cannot be named here without unsound variance.
  resolve: (result: any) => void;
  reject: (error: Error) => void;
  // Set by cancel(); checked before dispatch and before settling the promise.
  cancelled: boolean;
}
|
|
23
|
+
|
|
24
|
+
// --- TaskQueue ---
//
// Priority-sorted FIFO that feeds tasks to the WorkerPool. Tasks with equal
// priority keep submission order; lower `priority` numbers dispatch first.

export class TaskQueue {
  // Pending tasks, kept sorted by ascending priority (stable insert).
  private queue: QueuedTask[] = [];
  private pool: WorkerPool;
  // Guards against starting a second dispatch loop while one is running.
  private dispatching = false;

  constructor(pool: WorkerPool) {
    this.pool = pool;
  }

  // --- Public API ---

  // Enqueue one task and get a promise for its (task-type-specific) result.
  // The promise rejects if the task is cancelled or the pool fails.
  submit<T extends OffloadTask>(
    task: T,
  ): Promise<
    T extends TransformTask
      ? TransformResult
      : T extends ExtractTask
        ? ExtractResult
        : T extends BuildTask
          ? BuildResult
          : OffloadResult
  > {
    return new Promise((resolve, reject) => {
      const queued: QueuedTask = {
        task,
        resolve,
        reject,
        cancelled: false,
      };

      // Lower number = higher priority. Inserting before the first
      // strictly-lower-priority task keeps equal priorities FIFO.
      const idx = this.queue.findIndex(
        (q) => q.task.priority > task.priority,
      );
      if (idx === -1) {
        this.queue.push(queued);
      } else {
        this.queue.splice(idx, 0, queued);
      }

      this.dispatch();
    });
  }

  // Submit many tasks; rejects as a whole if any one task rejects
  // (Promise.all semantics — including a single cancel()).
  submitBatch(tasks: OffloadTask[]): Promise<OffloadResult[]> {
    return Promise.all(tasks.map((t) => this.submit(t)));
  }

  // Cancel a still-queued task by id: rejects its promise and removes it.
  // Returns false when the task is unknown or already dispatched — a task
  // that has left the queue cannot be cancelled here.
  cancel(taskId: string): boolean {
    const idx = this.queue.findIndex(
      (q) => q.task.id === taskId && !q.cancelled,
    );
    if (idx >= 0) {
      const queued = this.queue[idx];
      queued.cancelled = true;
      queued.reject(new Error(`Task ${taskId} cancelled`));
      this.queue.splice(idx, 1);
      return true;
    }
    return false;
  }

  // Number of tasks still waiting (not yet handed to a worker).
  get pending(): number {
    return this.queue.length;
  }

  // --- Internal ---

  // Drain the queue: acquire a pool worker per task, start the task without
  // awaiting it, and keep going so several tasks can run concurrently.
  // NOTE(review): the .finally callback below clears `dispatching` and
  // re-enters dispatch() while this loop may still be awaiting acquire(),
  // so two loops can briefly run concurrently. queue.shift() keeps them
  // from double-processing a task, but verify the intended invariant.
  private async dispatch(): Promise<void> {
    if (this.dispatching) return;
    this.dispatching = true;

    try {
      while (this.queue.length > 0) {
        const queued = this.queue.shift()!;
        if (queued.cancelled) continue;

        let worker: any;
        let release: (() => void) | undefined;
        try {
          const acquired = await this.pool.acquire();
          worker = acquired.worker;
          release = acquired.release;
        } catch (err) {
          // Pool broken — reject everything so offload() can fall back to main thread
          const reason =
            err instanceof Error ? err : new Error(String(err));
          queued.reject(reason);
          this.rejectAll(reason);
          return;
        }

        // Don't block the dispatch loop — fire and re-trigger on completion
        this.executeTask(worker.endpoint, queued).finally(() => {
          release!();
          // More tasks may have arrived while we were busy
          if (this.queue.length > 0) {
            this.dispatching = false;
            this.dispatch();
          }
        });
      }
    } finally {
      this.dispatching = false;
    }
  }

  // Reject every queued task with the same reason (used when the pool fails).
  private rejectAll(reason: Error): void {
    const pending = this.queue.splice(0);
    for (const q of pending) {
      if (!q.cancelled) {
        q.reject(reason);
      }
    }
  }

  // Run one task on a worker endpoint and settle its promise. The cancelled
  // flag is re-checked after the await so a late cancel is not resolved.
  private async executeTask(
    endpoint: any,
    queued: QueuedTask,
  ): Promise<void> {
    if (queued.cancelled) return;

    try {
      let result: OffloadResult;
      switch (queued.task.type) {
        case "transform":
          result = await endpoint.transform(queued.task);
          break;
        case "extract":
          result = await endpoint.extract(queued.task);
          break;
        case "build":
          result = await endpoint.build(queued.task);
          break;
        default:
          throw new Error(
            `Unknown task type: ${(queued.task as any).type}`,
          );
      }

      if (!queued.cancelled) {
        queued.resolve(result);
      }
    } catch (err) {
      if (!queued.cancelled) {
        queued.reject(
          err instanceof Error ? err : new Error(String(err)),
        );
      }
    }
  }
}
|