@scelar/nodepod 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__sw__.js +642 -642
- package/dist/__tests__/bench/integration.bench.d.ts +1 -0
- package/dist/__tests__/bench/memory-volume.bench.d.ts +1 -0
- package/dist/__tests__/bench/polyfills.bench.d.ts +1 -0
- package/dist/__tests__/bench/script-engine.bench.d.ts +1 -0
- package/dist/__tests__/bench/shell.bench.d.ts +1 -0
- package/dist/__tests__/bench/syntax-transforms.bench.d.ts +1 -0
- package/dist/__tests__/bench/version-resolver.bench.d.ts +1 -0
- package/dist/__tests__/buffer.test.d.ts +1 -0
- package/dist/__tests__/byte-encoding.test.d.ts +1 -0
- package/dist/__tests__/digest.test.d.ts +1 -0
- package/dist/__tests__/events.test.d.ts +1 -0
- package/dist/__tests__/memory-volume.test.d.ts +1 -0
- package/dist/__tests__/path.test.d.ts +1 -0
- package/dist/__tests__/process.test.d.ts +1 -0
- package/dist/__tests__/script-engine.test.d.ts +1 -0
- package/dist/__tests__/shell-builtins.test.d.ts +1 -0
- package/dist/__tests__/shell-interpreter.test.d.ts +1 -0
- package/dist/__tests__/shell-parser.test.d.ts +1 -0
- package/dist/__tests__/stream.test.d.ts +1 -0
- package/dist/__tests__/syntax-transforms.test.d.ts +1 -0
- package/dist/__tests__/version-resolver.test.d.ts +1 -0
- package/dist/{child_process-Dopvyd-E.js → child_process-53fMkug_.js} +4 -4
- package/dist/child_process-53fMkug_.js.map +1 -0
- package/dist/{child_process-B38qoN6R.cjs → child_process-lxSKECHq.cjs} +5 -5
- package/dist/child_process-lxSKECHq.cjs.map +1 -0
- package/dist/{index--Qr8LVpQ.js → index-B8lyh_ti.js} +1316 -559
- package/dist/index-B8lyh_ti.js.map +1 -0
- package/dist/{index-cnitc68U.cjs → index-C-TQIrdG.cjs} +1422 -612
- package/dist/index-C-TQIrdG.cjs.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.mjs +1 -1
- package/dist/memory-volume.d.ts +1 -1
- package/dist/polyfills/wasi.d.ts +45 -4
- package/dist/script-engine.d.ts +2 -0
- package/dist/sdk/nodepod.d.ts +4 -3
- package/dist/sdk/types.d.ts +6 -0
- package/dist/syntax-transforms.d.ts +1 -0
- package/dist/threading/process-manager.d.ts +1 -1
- package/dist/threading/worker-protocol.d.ts +1 -1
- package/package.json +5 -3
- package/src/__tests__/bench/integration.bench.ts +117 -0
- package/src/__tests__/bench/memory-volume.bench.ts +115 -0
- package/src/__tests__/bench/polyfills.bench.ts +147 -0
- package/src/__tests__/bench/script-engine.bench.ts +104 -0
- package/src/__tests__/bench/shell.bench.ts +101 -0
- package/src/__tests__/bench/syntax-transforms.bench.ts +82 -0
- package/src/__tests__/bench/version-resolver.bench.ts +95 -0
- package/src/__tests__/buffer.test.ts +273 -0
- package/src/__tests__/byte-encoding.test.ts +98 -0
- package/src/__tests__/digest.test.ts +44 -0
- package/src/__tests__/events.test.ts +245 -0
- package/src/__tests__/memory-volume.test.ts +443 -0
- package/src/__tests__/path.test.ts +181 -0
- package/src/__tests__/process.test.ts +129 -0
- package/src/__tests__/script-engine.test.ts +229 -0
- package/src/__tests__/shell-builtins.test.ts +357 -0
- package/src/__tests__/shell-interpreter.test.ts +157 -0
- package/src/__tests__/shell-parser.test.ts +204 -0
- package/src/__tests__/stream.test.ts +142 -0
- package/src/__tests__/syntax-transforms.test.ts +158 -0
- package/src/__tests__/version-resolver.test.ts +184 -0
- package/src/constants/cdn-urls.ts +18 -18
- package/src/helpers/byte-encoding.ts +51 -39
- package/src/index.ts +192 -192
- package/src/memory-volume.ts +968 -941
- package/src/module-transformer.ts +368 -368
- package/src/packages/installer.ts +396 -396
- package/src/packages/version-resolver.ts +12 -2
- package/src/polyfills/buffer.ts +633 -628
- package/src/polyfills/child_process.ts +2288 -2288
- package/src/polyfills/esbuild.ts +854 -854
- package/src/polyfills/events.ts +282 -276
- package/src/polyfills/fs.ts +2888 -2888
- package/src/polyfills/http.ts +1450 -1449
- package/src/polyfills/process.ts +721 -690
- package/src/polyfills/readline.ts +692 -692
- package/src/polyfills/stream.ts +1620 -1620
- package/src/polyfills/tty.ts +71 -71
- package/src/polyfills/wasi.ts +1284 -22
- package/src/request-proxy.ts +716 -716
- package/src/script-engine.ts +465 -146
- package/src/sdk/nodepod.ts +525 -509
- package/src/sdk/types.ts +7 -0
- package/src/syntax-transforms.ts +543 -561
- package/src/threading/offload-worker.ts +383 -383
- package/src/threading/offload.ts +271 -271
- package/src/threading/process-manager.ts +956 -956
- package/src/threading/process-worker-entry.ts +858 -854
- package/src/threading/worker-protocol.ts +1 -1
- package/dist/child_process-B38qoN6R.cjs.map +0 -1
- package/dist/child_process-Dopvyd-E.js.map +0 -1
- package/dist/index--Qr8LVpQ.js.map +0 -1
- package/dist/index-cnitc68U.cjs.map +0 -1
|
@@ -1,383 +1,383 @@
|
|
|
1
|
-
// Offload Worker entry point — runs transform/extract/build tasks in a dedicated thread.
|
|
2
|
-
// Tar parser and base64 helpers are duplicated here since workers can't share module state.
|
|
3
|
-
|
|
4
|
-
import { expose } from "comlink";
|
|
5
|
-
import type {
|
|
6
|
-
OffloadWorkerEndpoint,
|
|
7
|
-
TransformTask,
|
|
8
|
-
TransformResult,
|
|
9
|
-
ExtractTask,
|
|
10
|
-
ExtractResult,
|
|
11
|
-
ExtractedFile,
|
|
12
|
-
BuildTask,
|
|
13
|
-
BuildResult,
|
|
14
|
-
BuildOutputFile,
|
|
15
|
-
} from "./offload-types";
|
|
16
|
-
|
|
17
|
-
let esbuildEngine: any = null;
|
|
18
|
-
let pakoModule: any = null;
|
|
19
|
-
let initialized = false;
|
|
20
|
-
|
|
21
|
-
import { CDN_ESBUILD_ESM, CDN_ESBUILD_BINARY, cdnImport } from "../constants/cdn-urls";
|
|
22
|
-
import { CDN_PAKO } from "../constants/config";
|
|
23
|
-
|
|
24
|
-
const ESBUILD_ESM_URL = CDN_ESBUILD_ESM;
|
|
25
|
-
const ESBUILD_WASM_URL = CDN_ESBUILD_BINARY;
|
|
26
|
-
const PAKO_URL = CDN_PAKO;
|
|
27
|
-
|
|
28
|
-
// --- Base64 helpers (duplicated from helpers/byte-encoding.ts) ---
|
|
29
|
-
|
|
30
|
-
const SEGMENT_SIZE = 8192;
|
|
31
|
-
|
|
32
|
-
function uint8ToBase64(data: Uint8Array): string {
|
|
33
|
-
const segments: string[] = [];
|
|
34
|
-
for (let offset = 0; offset < data.length; offset += SEGMENT_SIZE) {
|
|
35
|
-
segments.push(
|
|
36
|
-
String.fromCharCode.apply(
|
|
37
|
-
null,
|
|
38
|
-
Array.from(data.subarray(offset, offset + SEGMENT_SIZE)),
|
|
39
|
-
),
|
|
40
|
-
);
|
|
41
|
-
}
|
|
42
|
-
return btoa(segments.join(""));
|
|
43
|
-
}
|
|
44
|
-
|
|
45
|
-
// --- Tar parser (duplicated from packages/archive-extractor.ts) ---
|
|
46
|
-
|
|
47
|
-
function readNullTerminated(
|
|
48
|
-
buf: Uint8Array,
|
|
49
|
-
start: number,
|
|
50
|
-
len: number,
|
|
51
|
-
): string {
|
|
52
|
-
const slice = buf.slice(start, start + len);
|
|
53
|
-
const zeroPos = slice.indexOf(0);
|
|
54
|
-
const trimmed = zeroPos >= 0 ? slice.slice(0, zeroPos) : slice;
|
|
55
|
-
return new TextDecoder().decode(trimmed);
|
|
56
|
-
}
|
|
57
|
-
|
|
58
|
-
function readOctalField(
|
|
59
|
-
buf: Uint8Array,
|
|
60
|
-
start: number,
|
|
61
|
-
len: number,
|
|
62
|
-
): number {
|
|
63
|
-
const raw = readNullTerminated(buf, start, len).trim();
|
|
64
|
-
return parseInt(raw, 8) || 0;
|
|
65
|
-
}
|
|
66
|
-
|
|
67
|
-
type EntryKind = "file" | "directory" | "link" | "other";
|
|
68
|
-
|
|
69
|
-
function classifyTypeFlag(flag: string): EntryKind {
|
|
70
|
-
switch (flag) {
|
|
71
|
-
case "0":
|
|
72
|
-
case "\0":
|
|
73
|
-
case "":
|
|
74
|
-
return "file";
|
|
75
|
-
case "5":
|
|
76
|
-
return "directory";
|
|
77
|
-
case "1":
|
|
78
|
-
case "2":
|
|
79
|
-
return "link";
|
|
80
|
-
default:
|
|
81
|
-
return "other";
|
|
82
|
-
}
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
interface TarEntry {
|
|
86
|
-
filepath: string;
|
|
87
|
-
kind: EntryKind;
|
|
88
|
-
byteSize: number;
|
|
89
|
-
payload?: Uint8Array;
|
|
90
|
-
}
|
|
91
|
-
|
|
92
|
-
function* parseTar(raw: Uint8Array): Generator<TarEntry> {
|
|
93
|
-
const BLOCK = 512;
|
|
94
|
-
let cursor = 0;
|
|
95
|
-
|
|
96
|
-
while (cursor + BLOCK <= raw.length) {
|
|
97
|
-
const header = raw.slice(cursor, cursor + BLOCK);
|
|
98
|
-
cursor += BLOCK;
|
|
99
|
-
|
|
100
|
-
if (header.every((b) => b === 0)) break;
|
|
101
|
-
|
|
102
|
-
const nameField = readNullTerminated(header, 0, 100);
|
|
103
|
-
if (!nameField) continue;
|
|
104
|
-
|
|
105
|
-
const byteSize = readOctalField(header, 124, 12);
|
|
106
|
-
const typeChar = String.fromCharCode(header[156]);
|
|
107
|
-
const prefixField = readNullTerminated(header, 345, 155);
|
|
108
|
-
const filepath = prefixField
|
|
109
|
-
? `${prefixField}/${nameField}`
|
|
110
|
-
: nameField;
|
|
111
|
-
const kind = classifyTypeFlag(typeChar);
|
|
112
|
-
|
|
113
|
-
let payload: Uint8Array | undefined;
|
|
114
|
-
if (kind === "file") {
|
|
115
|
-
payload =
|
|
116
|
-
byteSize > 0
|
|
117
|
-
? raw.slice(cursor, cursor + byteSize)
|
|
118
|
-
: new Uint8Array(0);
|
|
119
|
-
if (byteSize > 0) {
|
|
120
|
-
cursor += Math.ceil(byteSize / BLOCK) * BLOCK;
|
|
121
|
-
}
|
|
122
|
-
}
|
|
123
|
-
|
|
124
|
-
yield { filepath, kind, byteSize, payload };
|
|
125
|
-
}
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
// --- JSX detection (duplicated from module-transformer.ts) ---
|
|
129
|
-
|
|
130
|
-
function detectJsx(source: string): boolean {
|
|
131
|
-
if (/<[A-Z][a-zA-Z0-9.]*[\s/>]/.test(source)) return true;
|
|
132
|
-
if (/<\/[a-zA-Z]/.test(source)) return true;
|
|
133
|
-
if (/\/>/.test(source)) return true;
|
|
134
|
-
if (/<>|<\/>/.test(source)) return true;
|
|
135
|
-
if (/React\.createElement\b/.test(source)) return true;
|
|
136
|
-
if (/jsx\(|jsxs\(|jsxDEV\(/.test(source)) return true;
|
|
137
|
-
return false;
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
// --- Default define map for esbuild transforms ---
|
|
141
|
-
|
|
142
|
-
const DEFAULT_DEFINE: Record<string, string> = {
|
|
143
|
-
"import.meta.url": "import_meta.url",
|
|
144
|
-
"import.meta.dirname": "import_meta.dirname",
|
|
145
|
-
"import.meta.filename": "import_meta.filename",
|
|
146
|
-
"import.meta": "import_meta",
|
|
147
|
-
};
|
|
148
|
-
|
|
149
|
-
// --- Worker endpoint ---
|
|
150
|
-
|
|
151
|
-
const workerEndpoint: OffloadWorkerEndpoint = {
|
|
152
|
-
async init(): Promise<void> {
|
|
153
|
-
if (initialized) return;
|
|
154
|
-
|
|
155
|
-
const pakoMod = await cdnImport(PAKO_URL);
|
|
156
|
-
pakoModule = pakoMod.default || pakoMod;
|
|
157
|
-
|
|
158
|
-
const esbuildMod = await cdnImport(ESBUILD_ESM_URL);
|
|
159
|
-
esbuildEngine = esbuildMod.default || esbuildMod;
|
|
160
|
-
|
|
161
|
-
try {
|
|
162
|
-
await esbuildEngine.initialize({ wasmURL: ESBUILD_WASM_URL });
|
|
163
|
-
} catch (err: any) {
|
|
164
|
-
if (
|
|
165
|
-
!(
|
|
166
|
-
err instanceof Error &&
|
|
167
|
-
err.message.includes('Cannot call "initialize" more than once')
|
|
168
|
-
)
|
|
169
|
-
) {
|
|
170
|
-
throw err;
|
|
171
|
-
}
|
|
172
|
-
}
|
|
173
|
-
|
|
174
|
-
initialized = true;
|
|
175
|
-
},
|
|
176
|
-
|
|
177
|
-
async transform(task: TransformTask): Promise<TransformResult> {
|
|
178
|
-
if (!esbuildEngine) throw new Error("Worker not initialized");
|
|
179
|
-
|
|
180
|
-
const opts = task.options || {};
|
|
181
|
-
let loader: string = opts.loader || "js";
|
|
182
|
-
const format = opts.format || "cjs";
|
|
183
|
-
const define = opts.define || DEFAULT_DEFINE;
|
|
184
|
-
|
|
185
|
-
if (loader === "js" && detectJsx(task.source)) loader = "jsx";
|
|
186
|
-
|
|
187
|
-
const transformOpts = {
|
|
188
|
-
loader,
|
|
189
|
-
format,
|
|
190
|
-
target: opts.target || "esnext",
|
|
191
|
-
platform: opts.platform || "neutral",
|
|
192
|
-
define,
|
|
193
|
-
};
|
|
194
|
-
|
|
195
|
-
try {
|
|
196
|
-
const output = await esbuildEngine.transform(task.source, transformOpts);
|
|
197
|
-
return {
|
|
198
|
-
type: "transform" as const,
|
|
199
|
-
id: task.id,
|
|
200
|
-
code: output.code,
|
|
201
|
-
warnings: (output.warnings || []).map(
|
|
202
|
-
(w: any) => w.text || String(w),
|
|
203
|
-
),
|
|
204
|
-
};
|
|
205
|
-
} catch (err: any) {
|
|
206
|
-
// Retry with fallback loaders
|
|
207
|
-
const fallbacks: string[] =
|
|
208
|
-
loader === "js"
|
|
209
|
-
? ["jsx", "tsx", "ts"]
|
|
210
|
-
: loader === "jsx"
|
|
211
|
-
? ["tsx"]
|
|
212
|
-
: [];
|
|
213
|
-
|
|
214
|
-
for (const fb of fallbacks) {
|
|
215
|
-
try {
|
|
216
|
-
const output = await esbuildEngine.transform(task.source, {
|
|
217
|
-
...transformOpts,
|
|
218
|
-
loader: fb,
|
|
219
|
-
});
|
|
220
|
-
return {
|
|
221
|
-
type: "transform" as const,
|
|
222
|
-
id: task.id,
|
|
223
|
-
code: output.code,
|
|
224
|
-
warnings: [],
|
|
225
|
-
};
|
|
226
|
-
} catch {
|
|
227
|
-
/* try next fallback */
|
|
228
|
-
}
|
|
229
|
-
}
|
|
230
|
-
|
|
231
|
-
// TLA: fall back to ESM format
|
|
232
|
-
if (err?.message?.includes("Top-level await")) {
|
|
233
|
-
try {
|
|
234
|
-
const output = await esbuildEngine.transform(task.source, {
|
|
235
|
-
...transformOpts,
|
|
236
|
-
format: "esm",
|
|
237
|
-
});
|
|
238
|
-
return {
|
|
239
|
-
type: "transform" as const,
|
|
240
|
-
id: task.id,
|
|
241
|
-
code: output.code,
|
|
242
|
-
warnings: [],
|
|
243
|
-
};
|
|
244
|
-
} catch {
|
|
245
|
-
/* fall through */
|
|
246
|
-
}
|
|
247
|
-
}
|
|
248
|
-
|
|
249
|
-
// All retries exhausted — return original source
|
|
250
|
-
return {
|
|
251
|
-
type: "transform" as const,
|
|
252
|
-
id: task.id,
|
|
253
|
-
code: task.source,
|
|
254
|
-
warnings: [err?.message || "transform failed"],
|
|
255
|
-
};
|
|
256
|
-
}
|
|
257
|
-
},
|
|
258
|
-
|
|
259
|
-
async extract(task: ExtractTask): Promise<ExtractResult> {
|
|
260
|
-
if (!pakoModule) throw new Error("Worker not initialized");
|
|
261
|
-
|
|
262
|
-
const response = await fetch(task.tarballUrl);
|
|
263
|
-
if (!response.ok) {
|
|
264
|
-
throw new Error(
|
|
265
|
-
`Archive download failed (HTTP ${response.status}): ${task.tarballUrl}`,
|
|
266
|
-
);
|
|
267
|
-
}
|
|
268
|
-
|
|
269
|
-
const compressed = new Uint8Array(await response.arrayBuffer());
|
|
270
|
-
const tarBytes = pakoModule.inflate(compressed) as Uint8Array;
|
|
271
|
-
|
|
272
|
-
const files: ExtractedFile[] = [];
|
|
273
|
-
for (const entry of parseTar(tarBytes)) {
|
|
274
|
-
if (entry.kind !== "file" && entry.kind !== "directory") continue;
|
|
275
|
-
|
|
276
|
-
let relative = entry.filepath;
|
|
277
|
-
if (task.stripComponents > 0) {
|
|
278
|
-
const segments = relative.split("/").filter(Boolean);
|
|
279
|
-
if (segments.length <= task.stripComponents) continue;
|
|
280
|
-
relative = segments.slice(task.stripComponents).join("/");
|
|
281
|
-
}
|
|
282
|
-
|
|
283
|
-
if (entry.kind === "file" && entry.payload) {
|
|
284
|
-
let data: string;
|
|
285
|
-
let isBinary = false;
|
|
286
|
-
try {
|
|
287
|
-
data = new TextDecoder("utf-8", { fatal: true }).decode(
|
|
288
|
-
entry.payload,
|
|
289
|
-
);
|
|
290
|
-
} catch {
|
|
291
|
-
data = uint8ToBase64(entry.payload);
|
|
292
|
-
isBinary = true;
|
|
293
|
-
}
|
|
294
|
-
files.push({ path: relative, data, isBinary });
|
|
295
|
-
}
|
|
296
|
-
}
|
|
297
|
-
|
|
298
|
-
return { type: "extract" as const, id: task.id, files };
|
|
299
|
-
},
|
|
300
|
-
|
|
301
|
-
async build(task: BuildTask): Promise<BuildResult> {
|
|
302
|
-
if (!esbuildEngine) throw new Error("Worker not initialized");
|
|
303
|
-
|
|
304
|
-
const fileMap = new Map<string, string>();
|
|
305
|
-
for (const [p, content] of Object.entries(task.files)) {
|
|
306
|
-
fileMap.set(p, content);
|
|
307
|
-
}
|
|
308
|
-
|
|
309
|
-
const volumePlugin = {
|
|
310
|
-
name: "offload-volume",
|
|
311
|
-
setup(build: any) {
|
|
312
|
-
build.onLoad({ filter: /.*/ }, (args: any) => {
|
|
313
|
-
const content = fileMap.get(args.path);
|
|
314
|
-
if (content === undefined) return null;
|
|
315
|
-
const ext = args.path.substring(args.path.lastIndexOf("."));
|
|
316
|
-
const loaderMap: Record<string, string> = {
|
|
317
|
-
".ts": "ts",
|
|
318
|
-
".tsx": "tsx",
|
|
319
|
-
".js": "js",
|
|
320
|
-
".mjs": "js",
|
|
321
|
-
".cjs": "js",
|
|
322
|
-
".jsx": "jsx",
|
|
323
|
-
".json": "json",
|
|
324
|
-
".css": "css",
|
|
325
|
-
};
|
|
326
|
-
return {
|
|
327
|
-
contents: content,
|
|
328
|
-
loader: loaderMap[ext] || undefined,
|
|
329
|
-
};
|
|
330
|
-
});
|
|
331
|
-
},
|
|
332
|
-
};
|
|
333
|
-
|
|
334
|
-
try {
|
|
335
|
-
const result = await esbuildEngine.build({
|
|
336
|
-
entryPoints: task.entryPoints,
|
|
337
|
-
stdin: task.stdin,
|
|
338
|
-
bundle: task.bundle ?? true,
|
|
339
|
-
format: task.format || "esm",
|
|
340
|
-
platform: task.platform || "browser",
|
|
341
|
-
target: task.target || "esnext",
|
|
342
|
-
minify: task.minify ?? false,
|
|
343
|
-
external: task.external,
|
|
344
|
-
write: false,
|
|
345
|
-
plugins: [volumePlugin],
|
|
346
|
-
absWorkingDir: task.absWorkingDir || "/",
|
|
347
|
-
});
|
|
348
|
-
|
|
349
|
-
const outputFiles: BuildOutputFile[] = (result.outputFiles || []).map(
|
|
350
|
-
(f: any) => ({
|
|
351
|
-
path: f.path,
|
|
352
|
-
text: f.text || new TextDecoder().decode(f.contents),
|
|
353
|
-
}),
|
|
354
|
-
);
|
|
355
|
-
|
|
356
|
-
return {
|
|
357
|
-
type: "build" as const,
|
|
358
|
-
id: task.id,
|
|
359
|
-
outputFiles,
|
|
360
|
-
errors: (result.errors || []).map(
|
|
361
|
-
(e: any) => e.text || String(e),
|
|
362
|
-
),
|
|
363
|
-
warnings: (result.warnings || []).map(
|
|
364
|
-
(w: any) => w.text || String(w),
|
|
365
|
-
),
|
|
366
|
-
};
|
|
367
|
-
} catch (err: any) {
|
|
368
|
-
return {
|
|
369
|
-
type: "build" as const,
|
|
370
|
-
id: task.id,
|
|
371
|
-
outputFiles: [],
|
|
372
|
-
errors: [err?.message || "build failed"],
|
|
373
|
-
warnings: [],
|
|
374
|
-
};
|
|
375
|
-
}
|
|
376
|
-
},
|
|
377
|
-
|
|
378
|
-
ping(): boolean {
|
|
379
|
-
return true;
|
|
380
|
-
},
|
|
381
|
-
};
|
|
382
|
-
|
|
383
|
-
expose(workerEndpoint);
|
|
1
|
+
// Offload Worker entry point — runs transform/extract/build tasks in a dedicated thread.
// Tar parser and base64 helpers are duplicated here since workers can't share module state.

import { expose } from "comlink";
import type {
  OffloadWorkerEndpoint,
  TransformTask,
  TransformResult,
  ExtractTask,
  ExtractResult,
  ExtractedFile,
  BuildTask,
  BuildResult,
  BuildOutputFile,
} from "./offload-types";

// Lazily-populated module handles; assigned by workerEndpoint.init().
let esbuildEngine: any = null; // esbuild module (default export when present)
let pakoModule: any = null; // pako module (default export when present); used to inflate gzip tarballs
let initialized = false; // makes init() idempotent

// NOTE(review): these imports sit below the state declarations above. ESM
// imports are hoisted, so behavior is unaffected, but grouping them with the
// import block at the top of the file would read better.
import { CDN_ESBUILD_ESM, CDN_ESBUILD_BINARY, cdnImport } from "../constants/cdn-urls";
import { CDN_PAKO } from "../constants/config";

// CDN locations of the worker's runtime dependencies (esbuild ESM wrapper,
// its wasm binary, and pako).
const ESBUILD_ESM_URL = CDN_ESBUILD_ESM;
const ESBUILD_WASM_URL = CDN_ESBUILD_BINARY;
const PAKO_URL = CDN_PAKO;
|
|
27
|
+
|
|
28
|
+
// --- Base64 helpers (duplicated from helpers/byte-encoding.ts) ---
|
|
29
|
+
|
|
30
|
+
const SEGMENT_SIZE = 8192;
|
|
31
|
+
|
|
32
|
+
function uint8ToBase64(data: Uint8Array): string {
|
|
33
|
+
const segments: string[] = [];
|
|
34
|
+
for (let offset = 0; offset < data.length; offset += SEGMENT_SIZE) {
|
|
35
|
+
segments.push(
|
|
36
|
+
String.fromCharCode.apply(
|
|
37
|
+
null,
|
|
38
|
+
Array.from(data.subarray(offset, offset + SEGMENT_SIZE)),
|
|
39
|
+
),
|
|
40
|
+
);
|
|
41
|
+
}
|
|
42
|
+
return btoa(segments.join(""));
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// --- Tar parser (duplicated from packages/archive-extractor.ts) ---
|
|
46
|
+
|
|
47
|
+
function readNullTerminated(
|
|
48
|
+
buf: Uint8Array,
|
|
49
|
+
start: number,
|
|
50
|
+
len: number,
|
|
51
|
+
): string {
|
|
52
|
+
const slice = buf.slice(start, start + len);
|
|
53
|
+
const zeroPos = slice.indexOf(0);
|
|
54
|
+
const trimmed = zeroPos >= 0 ? slice.slice(0, zeroPos) : slice;
|
|
55
|
+
return new TextDecoder().decode(trimmed);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function readOctalField(
|
|
59
|
+
buf: Uint8Array,
|
|
60
|
+
start: number,
|
|
61
|
+
len: number,
|
|
62
|
+
): number {
|
|
63
|
+
const raw = readNullTerminated(buf, start, len).trim();
|
|
64
|
+
return parseInt(raw, 8) || 0;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
type EntryKind = "file" | "directory" | "link" | "other";
|
|
68
|
+
|
|
69
|
+
function classifyTypeFlag(flag: string): EntryKind {
|
|
70
|
+
switch (flag) {
|
|
71
|
+
case "0":
|
|
72
|
+
case "\0":
|
|
73
|
+
case "":
|
|
74
|
+
return "file";
|
|
75
|
+
case "5":
|
|
76
|
+
return "directory";
|
|
77
|
+
case "1":
|
|
78
|
+
case "2":
|
|
79
|
+
return "link";
|
|
80
|
+
default:
|
|
81
|
+
return "other";
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
interface TarEntry {
|
|
86
|
+
filepath: string;
|
|
87
|
+
kind: EntryKind;
|
|
88
|
+
byteSize: number;
|
|
89
|
+
payload?: Uint8Array;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
function* parseTar(raw: Uint8Array): Generator<TarEntry> {
|
|
93
|
+
const BLOCK = 512;
|
|
94
|
+
let cursor = 0;
|
|
95
|
+
|
|
96
|
+
while (cursor + BLOCK <= raw.length) {
|
|
97
|
+
const header = raw.slice(cursor, cursor + BLOCK);
|
|
98
|
+
cursor += BLOCK;
|
|
99
|
+
|
|
100
|
+
if (header.every((b) => b === 0)) break;
|
|
101
|
+
|
|
102
|
+
const nameField = readNullTerminated(header, 0, 100);
|
|
103
|
+
if (!nameField) continue;
|
|
104
|
+
|
|
105
|
+
const byteSize = readOctalField(header, 124, 12);
|
|
106
|
+
const typeChar = String.fromCharCode(header[156]);
|
|
107
|
+
const prefixField = readNullTerminated(header, 345, 155);
|
|
108
|
+
const filepath = prefixField
|
|
109
|
+
? `${prefixField}/${nameField}`
|
|
110
|
+
: nameField;
|
|
111
|
+
const kind = classifyTypeFlag(typeChar);
|
|
112
|
+
|
|
113
|
+
let payload: Uint8Array | undefined;
|
|
114
|
+
if (kind === "file") {
|
|
115
|
+
payload =
|
|
116
|
+
byteSize > 0
|
|
117
|
+
? raw.slice(cursor, cursor + byteSize)
|
|
118
|
+
: new Uint8Array(0);
|
|
119
|
+
if (byteSize > 0) {
|
|
120
|
+
cursor += Math.ceil(byteSize / BLOCK) * BLOCK;
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
yield { filepath, kind, byteSize, payload };
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// --- JSX detection (duplicated from module-transformer.ts) ---
|
|
129
|
+
|
|
130
|
+
function detectJsx(source: string): boolean {
|
|
131
|
+
if (/<[A-Z][a-zA-Z0-9.]*[\s/>]/.test(source)) return true;
|
|
132
|
+
if (/<\/[a-zA-Z]/.test(source)) return true;
|
|
133
|
+
if (/\/>/.test(source)) return true;
|
|
134
|
+
if (/<>|<\/>/.test(source)) return true;
|
|
135
|
+
if (/React\.createElement\b/.test(source)) return true;
|
|
136
|
+
if (/jsx\(|jsxs\(|jsxDEV\(/.test(source)) return true;
|
|
137
|
+
return false;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// --- Default define map for esbuild transforms ---
|
|
141
|
+
|
|
142
|
+
// Default esbuild `define` substitutions used by transform() when the caller
// supplies none: rewrites `import.meta` expressions to a local `import_meta`
// shim identifier so CJS-format output (which has no native import.meta)
// still evaluates. The shim object is presumably provided by the script
// runtime that executes the transformed code — TODO confirm against caller.
const DEFAULT_DEFINE: Record<string, string> = {
  "import.meta.url": "import_meta.url",
  "import.meta.dirname": "import_meta.dirname",
  "import.meta.filename": "import_meta.filename",
  "import.meta": "import_meta",
};
|
|
148
|
+
|
|
149
|
+
// --- Worker endpoint ---
|
|
150
|
+
|
|
151
|
+
/**
 * Comlink-exposed endpoint implementing the worker side of the offload
 * protocol: init (load esbuild + pako from CDN), transform (single-module
 * esbuild transform with loader/format fallbacks), extract (download +
 * gunzip + untar an archive), build (in-memory esbuild bundle), ping.
 */
const workerEndpoint: OffloadWorkerEndpoint = {
  /** Loads pako and esbuild from the CDN and initializes the esbuild wasm. Idempotent. */
  async init(): Promise<void> {
    if (initialized) return;

    const pakoMod = await cdnImport(PAKO_URL);
    // Some CDN bundles wrap the module in a default export; unwrap if so.
    pakoModule = pakoMod.default || pakoMod;

    const esbuildMod = await cdnImport(ESBUILD_ESM_URL);
    esbuildEngine = esbuildMod.default || esbuildMod;

    try {
      await esbuildEngine.initialize({ wasmURL: ESBUILD_WASM_URL });
    } catch (err: any) {
      // esbuild throws if initialize() runs twice (e.g. worker reuse);
      // that specific error is benign and is swallowed here.
      if (
        !(
          err instanceof Error &&
          err.message.includes('Cannot call "initialize" more than once')
        )
      ) {
        throw err;
      }
    }

    initialized = true;
  },

  /**
   * Transforms a single module with esbuild. On failure, retries with a
   * ladder of alternative loaders, then (for top-level await errors) with
   * ESM output; if everything fails, returns the source unchanged with the
   * error recorded as a warning rather than throwing.
   */
  async transform(task: TransformTask): Promise<TransformResult> {
    if (!esbuildEngine) throw new Error("Worker not initialized");

    const opts = task.options || {};
    let loader: string = opts.loader || "js";
    const format = opts.format || "cjs";
    const define = opts.define || DEFAULT_DEFINE;

    // Upgrade plain-JS loader when the source looks like JSX.
    if (loader === "js" && detectJsx(task.source)) loader = "jsx";

    const transformOpts = {
      loader,
      format,
      target: opts.target || "esnext",
      platform: opts.platform || "neutral",
      define,
    };

    try {
      const output = await esbuildEngine.transform(task.source, transformOpts);
      return {
        type: "transform" as const,
        id: task.id,
        code: output.code,
        warnings: (output.warnings || []).map(
          (w: any) => w.text || String(w),
        ),
      };
    } catch (err: any) {
      // Retry with fallback loaders — broader syntaxes for "js",
      // TSX for "jsx"; other loaders get no fallback.
      const fallbacks: string[] =
        loader === "js"
          ? ["jsx", "tsx", "ts"]
          : loader === "jsx"
            ? ["tsx"]
            : [];

      for (const fb of fallbacks) {
        try {
          const output = await esbuildEngine.transform(task.source, {
            ...transformOpts,
            loader: fb,
          });
          // NOTE: fallback successes drop the original warnings by design.
          return {
            type: "transform" as const,
            id: task.id,
            code: output.code,
            warnings: [],
          };
        } catch {
          /* try next fallback */
        }
      }

      // TLA: fall back to ESM format (top-level await is invalid in CJS).
      if (err?.message?.includes("Top-level await")) {
        try {
          const output = await esbuildEngine.transform(task.source, {
            ...transformOpts,
            format: "esm",
          });
          return {
            type: "transform" as const,
            id: task.id,
            code: output.code,
            warnings: [],
          };
        } catch {
          /* fall through */
        }
      }

      // All retries exhausted — return original source with the first
      // error surfaced as a warning so the caller can still proceed.
      return {
        type: "transform" as const,
        id: task.id,
        code: task.source,
        warnings: [err?.message || "transform failed"],
      };
    }
  },

  /**
   * Downloads a gzipped tarball, inflates it with pako, and returns its
   * files. Link/other entries are dropped; `stripComponents` leading path
   * segments are removed; file contents are returned as UTF-8 text or,
   * when not valid UTF-8, as base64 with `isBinary: true`.
   * @throws on a non-OK HTTP response.
   */
  async extract(task: ExtractTask): Promise<ExtractResult> {
    if (!pakoModule) throw new Error("Worker not initialized");

    const response = await fetch(task.tarballUrl);
    if (!response.ok) {
      throw new Error(
        `Archive download failed (HTTP ${response.status}): ${task.tarballUrl}`,
      );
    }

    const compressed = new Uint8Array(await response.arrayBuffer());
    const tarBytes = pakoModule.inflate(compressed) as Uint8Array;

    const files: ExtractedFile[] = [];
    for (const entry of parseTar(tarBytes)) {
      if (entry.kind !== "file" && entry.kind !== "directory") continue;

      let relative = entry.filepath;
      if (task.stripComponents > 0) {
        const segments = relative.split("/").filter(Boolean);
        // Entries entirely consumed by the strip (e.g. the top-level
        // package/ directory itself) are skipped.
        if (segments.length <= task.stripComponents) continue;
        relative = segments.slice(task.stripComponents).join("/");
      }

      // Directories yield no output entry; only files are collected.
      if (entry.kind === "file" && entry.payload) {
        let data: string;
        let isBinary = false;
        try {
          // fatal:true makes decode() throw on invalid UTF-8 so binary
          // payloads can be detected and base64-encoded instead.
          data = new TextDecoder("utf-8", { fatal: true }).decode(
            entry.payload,
          );
        } catch {
          data = uint8ToBase64(entry.payload);
          isBinary = true;
        }
        files.push({ path: relative, data, isBinary });
      }
    }

    return { type: "extract" as const, id: task.id, files };
  },

  /**
   * Runs an in-memory esbuild bundle over `task.files` (a path → contents
   * map served through an onLoad plugin). Build failures are returned in
   * `errors` rather than thrown.
   */
  async build(task: BuildTask): Promise<BuildResult> {
    if (!esbuildEngine) throw new Error("Worker not initialized");

    const fileMap = new Map<string, string>();
    for (const [p, content] of Object.entries(task.files)) {
      fileMap.set(p, content);
    }

    // Serves module contents out of fileMap; returning null lets esbuild's
    // default resolution handle paths we don't have.
    const volumePlugin = {
      name: "offload-volume",
      setup(build: any) {
        build.onLoad({ filter: /.*/ }, (args: any) => {
          const content = fileMap.get(args.path);
          if (content === undefined) return null;
          // Pick a loader from the file extension; unknown extensions
          // fall through to esbuild's default loader.
          const ext = args.path.substring(args.path.lastIndexOf("."));
          const loaderMap: Record<string, string> = {
            ".ts": "ts",
            ".tsx": "tsx",
            ".js": "js",
            ".mjs": "js",
            ".cjs": "js",
            ".jsx": "jsx",
            ".json": "json",
            ".css": "css",
          };
          return {
            contents: content,
            loader: loaderMap[ext] || undefined,
          };
        });
      },
    };

    try {
      const result = await esbuildEngine.build({
        entryPoints: task.entryPoints,
        stdin: task.stdin,
        bundle: task.bundle ?? true,
        format: task.format || "esm",
        platform: task.platform || "browser",
        target: task.target || "esnext",
        minify: task.minify ?? false,
        external: task.external,
        write: false, // keep outputs in memory; there is no real FS here
        plugins: [volumePlugin],
        absWorkingDir: task.absWorkingDir || "/",
      });

      const outputFiles: BuildOutputFile[] = (result.outputFiles || []).map(
        (f: any) => ({
          path: f.path,
          // Prefer the lazily-computed .text; fall back to decoding bytes.
          text: f.text || new TextDecoder().decode(f.contents),
        }),
      );

      return {
        type: "build" as const,
        id: task.id,
        outputFiles,
        errors: (result.errors || []).map(
          (e: any) => e.text || String(e),
        ),
        warnings: (result.warnings || []).map(
          (w: any) => w.text || String(w),
        ),
      };
    } catch (err: any) {
      // Surface the failure as a structured result instead of rejecting.
      return {
        type: "build" as const,
        id: task.id,
        outputFiles: [],
        errors: [err?.message || "build failed"],
        warnings: [],
      };
    }
  },

  /** Liveness probe for the host side. */
  ping(): boolean {
    return true;
  },
};

// Publish the endpoint over the worker's message channel via comlink.
expose(workerEndpoint);
|