@stream-mdx/worker 0.2.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -4
- package/dist/direct.cjs +3978 -0
- package/dist/direct.d.cts +50 -0
- package/dist/direct.d.ts +50 -0
- package/dist/direct.mjs +3961 -0
- package/dist/hosted/markdown-worker.js +303 -23
- package/dist/index.cjs +3954 -3
- package/dist/index.d.cts +2 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.mjs +3960 -0
- package/dist/node/index.cjs +383 -4
- package/dist/node/index.d.cts +68 -1
- package/dist/node/index.d.ts +68 -1
- package/dist/node/index.mjs +381 -4
- package/dist/node/worker-thread-entry.cjs +127 -3
- package/dist/node/worker-thread-entry.d.cts +2 -1
- package/dist/node/worker-thread-entry.d.ts +2 -1
- package/dist/node/worker-thread-entry.mjs +106 -3
- package/dist/worker-client-wln-351a.d.cts +84 -0
- package/dist/worker-client-wln-351a.d.ts +84 -0
- package/dist/worker-client.d.cts +2 -81
- package/dist/worker-client.d.ts +2 -81
- package/package.json +8 -3
package/dist/node/index.mjs
CHANGED
|
@@ -1,13 +1,45 @@
|
|
|
1
1
|
// src/node/index.ts
|
|
2
|
+
import { createHash } from "crypto";
|
|
3
|
+
import { existsSync } from "fs";
|
|
4
|
+
import { promises as fs } from "fs";
|
|
5
|
+
import { createRequire } from "module";
|
|
2
6
|
import path from "path";
|
|
3
|
-
import { pathToFileURL } from "url";
|
|
7
|
+
import { fileURLToPath, pathToFileURL } from "url";
|
|
4
8
|
import { Worker } from "worker_threads";
|
|
9
|
+
import { PATCH_ROOT_ID, applyPatchBatch, createInitialSnapshot } from "@stream-mdx/core";
|
|
5
10
|
/**
 * Locate the hosted markdown worker bundle as a file: URL.
 * Resolution order: files under the installed package root (via Node's
 * require resolution), then a series of locations relative to this module.
 * Always returns a URL; if nothing exists on disk, the first module-relative
 * candidate is returned as the default.
 */
function getHostedWorkerBundleUrl() {
  // Prefer a bundle located under the resolved package root.
  const packageRoot = resolvePackageRootFromRequire();
  if (packageRoot) {
    const rootRelative = [
      "dist/hosted/markdown-worker.js",
      "dist/worker.mjs",
      "dist/worker.js",
      "src/worker.ts"
    ].map((rel) => path.join(packageRoot, rel));
    const resolved = firstExistingPath(rootRelative);
    if (resolved) return pathToFileURL(resolved);
  }
  // Fall back to probing well-known locations relative to this module's URL.
  const moduleUrl = getModuleUrl();
  const relativeSpecs = [
    "../hosted/markdown-worker.js",
    "../../dist/hosted/markdown-worker.js",
    "../../../public/workers/markdown-worker.js",
    "../../../../public/workers/markdown-worker.js",
    "../../dist/worker.mjs",
    "../../dist/worker.js",
    "../worker.mjs",
    "../worker.js",
    "../worker.ts"
  ];
  const candidates = relativeSpecs.map((spec) => new URL(spec, moduleUrl));
  const existing = candidates.find((candidate) => urlExists(candidate));
  // Default to the primary candidate even when nothing was found on disk.
  return existing ?? candidates[0];
}
|
|
8
40
|
function createWorkerThread(options = {}) {
|
|
9
41
|
const { workerBundle, workerData, ...workerOptions } = options;
|
|
10
|
-
const runnerUrl =
|
|
42
|
+
const runnerUrl = resolveWorkerThreadEntryUrl();
|
|
11
43
|
const bundleUrl = normalizeWorkerBundleUrl(workerBundle) ?? getHostedWorkerBundleUrl();
|
|
12
44
|
return new Worker(runnerUrl, {
|
|
13
45
|
...workerOptions,
|
|
@@ -17,6 +49,37 @@ function createWorkerThread(options = {}) {
|
|
|
17
49
|
}
|
|
18
50
|
});
|
|
19
51
|
}
|
|
52
|
+
/**
 * Resolve the worker-thread bootstrap entry as a file: URL.
 * Prefers built entries under the installed package root, then files next to
 * this module (built .mjs, then TypeScript source), defaulting to the built
 * entry URL when neither exists on disk.
 */
function resolveWorkerThreadEntryUrl() {
  const packageRoot = resolvePackageRootFromRequire();
  if (packageRoot) {
    const packaged = firstExistingPath(
      [
        "dist/node/worker-thread-entry.mjs",
        "dist/node/worker-thread-entry.cjs",
        "src/node/worker-thread-entry.ts"
      ].map((rel) => path.join(packageRoot, rel))
    );
    if (packaged) return pathToFileURL(packaged);
  }
  // Look next to this module: built entry first, TS source second.
  const moduleUrl = getModuleUrl();
  const builtEntry = new URL("./worker-thread-entry.mjs", moduleUrl);
  if (urlExists(builtEntry)) return builtEntry;
  const sourceEntry = new URL("./worker-thread-entry.ts", moduleUrl);
  if (urlExists(sourceEntry)) return sourceEntry;
  // Nothing found: hand back the built-entry URL as the default.
  return builtEntry;
}
|
|
75
|
+
/**
 * True when `url` is a file: URL whose path exists on disk.
 * Any non-file protocol, or a file URL that cannot be converted to a local
 * path, is reported as absent rather than throwing.
 */
function urlExists(url) {
  // Only file: URLs can be probed on the local filesystem.
  if (url.protocol !== "file:") {
    return false;
  }
  try {
    const localPath = fileURLToPath(url);
    return existsSync(path.normalize(localPath));
  } catch {
    // fileURLToPath throws on malformed file URLs (unexpected host, bad
    // percent-encoding); treat those as "does not exist".
    return false;
  }
}
|
|
20
83
|
function normalizeWorkerBundleUrl(value) {
|
|
21
84
|
if (!value) return void 0;
|
|
22
85
|
if (value instanceof URL) return value;
|
|
@@ -26,13 +89,327 @@ function normalizeWorkerBundleUrl(value) {
|
|
|
26
89
|
return pathToFileURL(path.resolve(value));
|
|
27
90
|
}
|
|
28
91
|
}
|
|
92
|
+
/**
 * Return the first candidate path that exists on disk, or undefined when
 * none do. Errors from existsSync (e.g. invalid path values) are treated
 * the same as "missing" so one bad candidate never aborts the scan.
 */
function firstExistingPath(candidates) {
  for (const candidate of candidates) {
    let present = false;
    try {
      present = existsSync(candidate);
    } catch {
      // Unprobeable candidate: treat as absent and keep scanning.
    }
    if (present) {
      return candidate;
    }
  }
  return undefined;
}
|
|
101
|
+
/**
 * Find the installed @stream-mdx/worker package root via Node's require
 * resolution, or undefined when the package cannot be resolved.
 * Tries two resolution anchors: the current working directory's
 * package.json and (when running as CJS) this file itself.
 */
function resolvePackageRootFromRequire() {
  // Synthetic anchor used when __filename is unavailable (pure ESM).
  const fallbackBase = path.join(process.cwd(), "__stream-mdx-node-resolver__.cjs");
  const hasFilename = typeof __filename === "string" && __filename.length > 0;
  const bases = [
    path.join(process.cwd(), "package.json"),
    hasFilename ? __filename : fallbackBase
  ];
  for (const base of bases) {
    try {
      const requireFrom = createRequire(base);
      const nodeEntry = requireFrom.resolve("@stream-mdx/worker/node");
      // nodeEntry points at <root>/dist/node/index.*; root is two levels up.
      return path.resolve(path.dirname(nodeEntry), "..", "..");
    } catch {
      // Unresolvable from this anchor; try the next one.
    }
  }
  return undefined;
}
|
|
29
116
|
/**
 * Best-effort URL of the current module, working in both ESM and CJS
 * builds. Resolution order: import.meta.url, CJS __filename, then stack
 * inspection. Throws when every strategy fails.
 */
function getModuleUrl() {
  const fromImportMeta = getImportMetaUrl();
  if (fromImportMeta) return fromImportMeta;
  // CJS builds expose __filename; typeof-guard avoids a ReferenceError in ESM.
  const hasFilename = typeof __filename === "string" && __filename.length > 0;
  if (hasFilename) {
    return pathToFileURL(__filename).href;
  }
  const fromStack = getModuleUrlFromStack();
  if (fromStack) return fromStack;
  throw new Error("[stream-mdx] Unable to resolve module URL.");
}
|
|
130
|
+
/**
 * Read import.meta.url without tripping CJS transforms.
 * Returns undefined when not running as an ES module.
 */
function getImportMetaUrl() {
  try {
    // Indirect eval keeps bundlers and the CJS build from rewriting or
    // rejecting the import.meta reference at compile time.
    const url = (0, eval)("import.meta.url");
    if (typeof url === "string" && url.length > 0) {
      return url;
    }
    return undefined;
  } catch {
    // import.meta is a syntax error outside module scope.
    return undefined;
  }
}
|
|
138
|
+
// Last-resort module-URL discovery. Temporarily installs a structured
// Error.prepareStackTrace (a V8-specific hook) so `new Error().stack`
// yields CallSite objects, then scans frame file names for this module.
// Returns a file:// href, or undefined when nothing usable is found.
function getModuleUrlFromStack() {
  const previous = Error.prepareStackTrace;
  try {
    Error.prepareStackTrace = (_error, stackTrace2) => stackTrace2;
    const stackTrace = new Error().stack;
    // On non-V8 runtimes the stack stays a string; give up.
    if (!Array.isArray(stackTrace)) return void 0;
    const files = stackTrace.map((frame) => frame.getFileName()).filter((fileName) => Boolean(fileName));
    // Prefer a frame that clearly belongs to this package's node entry
    // (checked with both POSIX and Windows path separators).
    const preferred = files.find(
      (fileName) => fileName.includes("/markdown-v2-worker/src/node/index.") || fileName.includes("/markdown-v2-worker/dist/node/index.") || fileName.includes("\\markdown-v2-worker\\src\\node\\index.") || fileName.includes("\\markdown-v2-worker\\dist\\node\\index.")
    );
    if (preferred) {
      return preferred.startsWith("file://") ? preferred : pathToFileURL(preferred).href;
    }
    // Otherwise fall back to the first frame with a file URL or absolute path.
    const firstAbsolute = files.find((fileName) => fileName.startsWith("file://") || path.isAbsolute(fileName));
    if (!firstAbsolute) return void 0;
    return firstAbsolute.startsWith("file://") ? firstAbsolute : pathToFileURL(firstAbsolute).href;
  } catch {
    return void 0;
  } finally {
    // Always restore the global hook so other stack consumers are unaffected.
    Error.prepareStackTrace = previous;
  }
}
|
|
160
|
+
/**
 * Public alias for the internal compile-input hash: a sha256 hex digest over
 * the canonicalized (text, init, hashSalt) triple. Exported so callers can
 * compute cache keys without compiling.
 */
function computeSnapshotHash(text, init, hashSalt) {
  return hashCompileInput(text, init, hashSalt);
}
|
|
163
|
+
/**
 * Compile a markdown/MDX document to a block snapshot by driving a worker
 * thread through the INIT -> (PATCH ...) -> FINALIZED message protocol.
 *
 * @param {object} options
 *   - text: markdown source to compile.
 *   - init: worker init config (docPlugins, mdx, prewarmLangs).
 *   - hashSalt: extra entropy folded into the snapshot hash.
 *   - worker: existing worker to reuse; when omitted, one is created here
 *     and terminated when the compile settles.
 *   - workerOptions: passed to createWorkerThread when no worker is given.
 *   - timeoutMs: overall compile deadline (default 30 000 ms).
 *   - settleMs: quiet period after the last message before finalizing when
 *     finalize === false (default 50 ms).
 *   - cache: { dir, key?, readOnly? } on-disk snapshot cache config.
 *   - finalize: when true, request an explicit FINALIZE from the worker;
 *     when false, finalize once the patch stream goes idle for settleMs.
 * @returns {Promise<{blocks, snapshot, artifact, fromCache}>}
 */
async function compileMarkdownSnapshot(options) {
  const {
    text,
    init,
    hashSalt,
    worker: providedWorker,
    workerOptions,
    timeoutMs = 3e4,
    settleMs = 50,
    cache,
    finalize = true
  } = options;
  const hash = hashCompileInput(text, init, hashSalt);
  // Cache key defaults to the content hash; the hash is still verified on read.
  const cacheKey = cache?.key ?? hash;
  const cachePath = cache ? path.join(cache.dir, `${sanitizeCacheKey(cacheKey)}.json`) : null;
  if (cachePath) {
    const cached = await readSnapshotCache(cachePath, hash);
    if (cached) {
      // Cache hit: rebuild the snapshot from stored blocks, skip the worker.
      const snapshot = createInitialSnapshot(cached.blocks);
      return {
        blocks: snapshot.blocks,
        snapshot,
        artifact: cached,
        fromCache: true
      };
    }
  }
  const worker = providedWorker ?? createWorkerThread(workerOptions);
  // Only terminate workers this call created; callers own the ones they pass in.
  const ownedWorker = !providedWorker;
  return await new Promise((resolve, reject) => {
    // Mutable compile state, driven by incoming worker messages.
    let snapshot = null;
    let initialized = false;
    let settled = false;
    let idleTimer = null;
    let timeoutTimer = null;
    // Build the artifact from the current snapshot and resolve the promise.
    // Guarded by `settled` so finalize/fail are mutually exclusive.
    const finalizeNow = () => {
      if (settled) return;
      settled = true;
      cleanup();
      const finalSnapshot = snapshot ?? createInitialSnapshot();
      const blocks = finalSnapshot.blocks;
      const contentHash = createHash("sha256").update(text).digest("hex");
      // configHash covers everything but the text, so callers can detect
      // config-only changes independently of content changes.
      const configHash = createHash("sha256").update(
        stableStringify({
          init: init ?? null,
          salt: hashSalt ?? null
        })
      ).digest("hex");
      const artifact = {
        version: 1,
        schemaId: "streammdx.snapshot.v1",
        createdAt: (/* @__PURE__ */ new Date()).toISOString(),
        hash,
        contentHash,
        configHash,
        hashSalt: hashSalt ?? void 0,
        blocks,
        // Table-of-contents headings live on the root patch node's props.
        tocHeadings: (() => {
          const root = finalSnapshot.nodes.get(PATCH_ROOT_ID);
          const maybe = root?.props?.tocHeadings;
          return Array.isArray(maybe) ? maybe : void 0;
        })(),
        init: init ? {
          docPlugins: init.docPlugins,
          mdx: init.mdx,
          prewarmLangs: init.prewarmLangs
        } : void 0
      };
      if (cachePath && !cache?.readOnly) {
        // Fire-and-forget: cache writes are best-effort and must not block.
        void writeSnapshotCache(cachePath, artifact);
      }
      resolve({
        blocks,
        snapshot: finalSnapshot,
        artifact,
        fromCache: false
      });
    };
    const fail = (error) => {
      if (settled) return;
      settled = true;
      cleanup();
      reject(error);
    };
    // In streaming mode (finalize === false), finish after settleMs of quiet.
    const scheduleIdleFinalize = () => {
      if (finalize || !initialized) return;
      if (idleTimer) {
        clearTimeout(idleTimer);
      }
      idleTimer = setTimeout(() => finalizeNow(), settleMs);
    };
    const handleMessage = (message) => {
      switch (message.type) {
        case "INITIALIZED":
          snapshot = createInitialSnapshot(message.blocks);
          initialized = true;
          scheduleIdleFinalize();
          break;
        case "PATCH":
          if (snapshot) {
            snapshot.blocks = applyPatchBatch(snapshot, message.patches);
          }
          scheduleIdleFinalize();
          break;
        case "FINALIZED":
          if (finalize) {
            finalizeNow();
          }
          break;
        case "RESET":
          fail(new Error(`Worker reset during compile: ${message.reason}`));
          break;
        case "ERROR":
          fail(new Error(`Worker error (${message.phase}): ${message.error.message}`));
          break;
        default:
          // Unknown message types are ignored for forward compatibility.
          break;
      }
    };
    // Detach timers/listeners and, when owned, terminate the worker.
    const cleanup = () => {
      if (idleTimer) clearTimeout(idleTimer);
      if (timeoutTimer) clearTimeout(timeoutTimer);
      worker.off("message", handleMessage);
      worker.off("error", fail);
      if (ownedWorker) {
        try {
          worker.terminate();
        } catch {
        }
      }
    };
    worker.on("message", handleMessage);
    worker.on("error", fail);
    timeoutTimer = setTimeout(() => {
      fail(new Error("Worker compile timed out."));
    }, timeoutMs);
    const initMessage = {
      type: "INIT",
      initialContent: text,
      prewarmLangs: init?.prewarmLangs,
      docPlugins: init?.docPlugins,
      mdx: init?.mdx
    };
    worker.postMessage(initMessage);
    if (finalize) {
      worker.postMessage({ type: "FINALIZE" });
    }
  });
}
|
|
312
|
+
/**
 * Create a small round-robin pool of compile workers with an optional
 * in-memory LRU-ish result cache.
 *
 * @param {object} options
 *   - size: worker count, clamped to [1, 8] (default 2).
 *   - workerOptions: forwarded to createWorkerThread for each worker.
 *   - maxMemoryEntries: in-memory result cache capacity; 0 disables it
 *     (default 64).
 * @returns {{compile: Function, close: Function}} compile() mirrors
 *   compileMarkdownSnapshot; close() terminates every pooled worker.
 */
function createCompileMarkdownSnapshotPool(options = {}) {
  const size = Math.max(1, Math.min(8, Math.floor(options.size ?? 2)));
  const workers = new Array(size).fill(null).map(() => createWorkerThread(options.workerOptions));
  // One serial promise chain per worker keeps each worker single-tenant.
  const queues = new Array(size).fill(Promise.resolve());
  let rr = 0;
  const maxMemoryEntries = Math.max(0, Math.floor(options.maxMemoryEntries ?? 64));
  const memory = maxMemoryEntries > 0 ? /* @__PURE__ */ new Map() : null;
  // Append `fn` to worker `index`'s queue; the returned promise settles with
  // fn's outcome while the queue itself always continues (errors swallowed
  // by the .catch so one failed job never wedges the chain).
  const enqueue = (index, fn) => {
    let resolveOuter;
    let rejectOuter;
    const outer = new Promise((resolve, reject) => {
      resolveOuter = resolve;
      rejectOuter = reject;
    });
    queues[index] = queues[index].catch(() => void 0).then(async () => {
      try {
        const value = await fn();
        resolveOuter(value);
      } catch (err) {
        rejectOuter(err);
      }
    }).then(() => void 0);
    return outer;
  };
  const compile = async (compileOptions) => {
    const hash = computeSnapshotHash(compileOptions.text, compileOptions.init, compileOptions.hashSalt);
    if (memory && memory.has(hash)) {
      return memory.get(hash);
    }
    // Round-robin worker selection.
    const index = rr++ % workers.length;
    const result = await enqueue(index, async () => {
      return await compileMarkdownSnapshot({
        ...compileOptions,
        worker: workers[index]
      });
    });
    if (memory) {
      memory.set(hash, result);
      // Evict the oldest entry (Map preserves insertion order) when full.
      if (memory.size > maxMemoryEntries) {
        const firstKey = memory.keys().next().value;
        if (firstKey) memory.delete(firstKey);
      }
    }
    return result;
  };
  // Terminate every worker; termination errors are intentionally ignored.
  const close = async () => {
    for (const worker of workers) {
      try {
        await worker.terminate();
      } catch {
      }
    }
  };
  return { compile, close };
}
|
|
367
|
+
/**
 * Load a cached snapshot artifact from disk.
 * Returns the parsed artifact only when it is a version-1 record whose
 * stored hash matches `hash` and whose blocks field is an array; any miss,
 * read failure, or malformed/stale content yields null.
 */
async function readSnapshotCache(cachePath, hash) {
  let parsed;
  try {
    const raw = await fs.readFile(cachePath, "utf8");
    parsed = JSON.parse(raw);
  } catch {
    // Missing file or corrupt JSON: treat as a cache miss.
    return null;
  }
  const isValid =
    Boolean(parsed) &&
    parsed.version === 1 &&
    parsed.hash === hash &&
    Array.isArray(parsed.blocks);
  return isValid ? parsed : null;
}
|
|
379
|
+
/**
 * Persist a snapshot artifact to disk as pretty-printed JSON, creating
 * parent directories as needed. Strictly best-effort: every failure
 * (unwritable path, unserializable artifact, ...) is swallowed so caching
 * can never break a compile.
 */
async function writeSnapshotCache(cachePath, artifact) {
  try {
    const parentDir = path.dirname(cachePath);
    await fs.mkdir(parentDir, { recursive: true });
    const serialized = JSON.stringify(artifact, null, 2);
    await fs.writeFile(cachePath, serialized, "utf8");
  } catch {
    // Intentionally ignored: cache writes are opportunistic.
  }
}
|
|
386
|
+
/**
 * Make a cache key safe to use as a file name by collapsing every run of
 * characters outside [a-zA-Z0-9._-] into a single underscore.
 */
function sanitizeCacheKey(value) {
  const unsafeRun = /[^a-z0-9._-]+/gi;
  return value.replace(unsafeRun, "_");
}
|
|
389
|
+
/**
 * sha256 hex digest over the canonicalized compile inputs.
 * Nullish init/hashSalt normalize to null so "absent" and "explicit null"
 * hash identically, and stableStringify makes the digest independent of
 * object key insertion order.
 */
function hashCompileInput(text, init, hashSalt) {
  const canonical = stableStringify({
    text,
    init: init ?? null,
    salt: hashSalt ?? null
  });
  const digest = createHash("sha256");
  digest.update(canonical);
  return digest.digest("hex");
}
|
|
397
|
+
/**
 * Deterministic JSON serialization: object keys are emitted in sorted
 * (localeCompare) order at every depth, so structurally-equal values always
 * produce the same string. Values JSON cannot represent (undefined,
 * functions) serialize as "null".
 */
function stableStringify(value) {
  // Primitives (plus undefined/functions) go through JSON.stringify,
  // normalizing unserializable results to "null".
  if (value === null || typeof value !== "object") {
    const encoded = JSON.stringify(value);
    return encoded === undefined ? "null" : encoded;
  }
  if (Array.isArray(value)) {
    const parts = value.map((element) => stableStringify(element));
    return `[${parts.join(",")}]`;
  }
  // Plain object: sort entries by key, then recurse on each value.
  const sortedEntries = Object.entries(value).sort(([left], [right]) => left.localeCompare(right));
  const pieces = sortedEntries.map(
    ([key, entryValue]) => `${JSON.stringify(key)}:${stableStringify(entryValue)}`
  );
  return `{${pieces.join(",")}}`;
}
|
|
35
409
|
export {
|
|
410
|
+
compileMarkdownSnapshot,
|
|
411
|
+
computeSnapshotHash,
|
|
412
|
+
createCompileMarkdownSnapshotPool,
|
|
36
413
|
createWorkerThread,
|
|
37
414
|
getHostedWorkerBundleUrl
|
|
38
415
|
};
|
|
@@ -1,9 +1,33 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
// Bundler-generated interop helper (esbuild-style — TODO confirm toolchain):
// copies every own property of `from` onto `to` as a live getter, skipping
// properties `to` already has and the single `except` key. Enumerability of
// the source descriptor is preserved. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
|
|
16
|
+
// Bundler-generated interop helper: wraps a CommonJS module so it can be
// consumed like an ES module, attaching a `default` binding when the source
// was not transpiled ESM (no `__esModule` flag).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
|
|
2
24
|
|
|
3
25
|
// src/node/worker-thread-entry.ts
|
|
26
|
+
var import_node_path = __toESM(require("path"), 1);
|
|
27
|
+
var import_node_fs = require("fs");
|
|
28
|
+
var import_node_module = require("module");
|
|
4
29
|
var import_node_url = require("url");
|
|
5
30
|
var import_node_worker_threads = require("worker_threads");
|
|
6
|
-
var import_meta = {};
|
|
7
31
|
var port = import_node_worker_threads.parentPort;
|
|
8
32
|
if (!port) {
|
|
9
33
|
throw new Error("[stream-mdx] worker thread bootstrap missing parentPort.");
|
|
@@ -54,7 +78,7 @@ port.on("message", (data) => {
|
|
|
54
78
|
dispatchMessage(data);
|
|
55
79
|
});
|
|
56
80
|
var bootstrap = import_node_worker_threads.workerData ?? {};
|
|
57
|
-
var bundleUrl = typeof bootstrap.bundleUrl === "string" && bootstrap.bundleUrl.length > 0 ? bootstrap.bundleUrl :
|
|
81
|
+
var bundleUrl = typeof bootstrap.bundleUrl === "string" && bootstrap.bundleUrl.length > 0 ? bootstrap.bundleUrl : resolveDefaultBundleUrl().href;
|
|
58
82
|
void (async () => {
|
|
59
83
|
await import(bundleUrl);
|
|
60
84
|
ready = true;
|
|
@@ -68,8 +92,108 @@ void (async () => {
|
|
|
68
92
|
throw error;
|
|
69
93
|
});
|
|
70
94
|
// Best-effort URL of this worker-thread-entry module (CJS build).
// Order: import.meta.url (via indirect eval), __filename, stack inspection.
// Throws when every strategy fails.
function getModuleUrl() {
  const fromImportMeta = getImportMetaUrl();
  if (fromImportMeta) {
    return fromImportMeta;
  }
  // CJS always defines __filename; the typeof guard keeps this safe if the
  // file is ever evaluated in a non-CJS context.
  if (typeof __filename === "string" && __filename.length > 0) {
    return (0, import_node_url.pathToFileURL)(__filename).href;
  }
  const fromStack = getModuleUrlFromStack();
  if (fromStack) {
    return fromStack;
  }
  throw new Error("[stream-mdx] Unable to resolve worker-thread module URL.");
}
|
|
108
|
+
// Probe import.meta.url without letting the CJS transform rewrite or reject
// the reference: indirect eval defers the syntax check to runtime. Returns
// undefined when not running as an ES module.
function getImportMetaUrl() {
  try {
    const candidate = (0, eval)("import.meta.url");
    return typeof candidate === "string" && candidate.length > 0 ? candidate : void 0;
  } catch {
    // import.meta is a syntax error outside module scope.
    return void 0;
  }
}
|
|
116
|
+
// Default worker-bundle location used when the bootstrap workerData does not
// carry an explicit bundleUrl. Mirrors getHostedWorkerBundleUrl in the node
// entry: package-root candidates first, then module-relative probes, finally
// falling back to the first module-relative candidate even if absent.
function resolveDefaultBundleUrl() {
  const packageRoot = resolvePackageRootFromRequire();
  if (packageRoot) {
    const resolved = firstExistingPath([
      import_node_path.default.join(packageRoot, "dist/hosted/markdown-worker.js"),
      import_node_path.default.join(packageRoot, "dist/worker.mjs"),
      import_node_path.default.join(packageRoot, "dist/worker.js"),
      import_node_path.default.join(packageRoot, "src/worker.ts")
    ]);
    if (resolved) {
      return (0, import_node_url.pathToFileURL)(resolved);
    }
  }
  // No package root: probe well-known locations relative to this module.
  const moduleUrl = getModuleUrl();
  const candidates = [
    new URL("../hosted/markdown-worker.js", moduleUrl),
    new URL("../../dist/hosted/markdown-worker.js", moduleUrl),
    new URL("../../../public/workers/markdown-worker.js", moduleUrl),
    new URL("../../../../public/workers/markdown-worker.js", moduleUrl),
    new URL("../../dist/worker.mjs", moduleUrl),
    new URL("../../dist/worker.js", moduleUrl),
    new URL("../worker.mjs", moduleUrl),
    new URL("../worker.js", moduleUrl),
    new URL("../worker.ts", moduleUrl)
  ];
  for (const candidate of candidates) {
    if (urlExists(candidate)) return candidate;
  }
  // Nothing exists on disk: return the primary candidate as the default.
  return candidates[0];
}
|
|
146
|
+
// True when `url` is a file: URL that points at an existing path on disk.
// Non-file protocols and malformed file URLs report false instead of throwing.
function urlExists(url) {
  if (url.protocol !== "file:") return false;
  try {
    return (0, import_node_fs.existsSync)(import_node_path.default.normalize((0, import_node_url.fileURLToPath)(url)));
  } catch {
    // fileURLToPath throws on invalid file URLs (e.g. unexpected host).
    return false;
  }
}
|
|
154
|
+
// Returns the first candidate path that exists on disk, or undefined.
// existsSync errors are treated as "missing" so one bad candidate never
// aborts the scan.
function firstExistingPath(candidates) {
  for (const candidate of candidates) {
    try {
      if ((0, import_node_fs.existsSync)(candidate)) return candidate;
    } catch {
      // ignore and try the next candidate
    }
  }
  return void 0;
}
|
|
163
|
+
// Locate the installed @stream-mdx/worker package root via Node's require
// resolution, anchoring first at the CWD's package.json, then at this file.
// The resolved node entry sits at <root>/dist/node/index.*, hence the
// two-level walk up. Returns undefined when the package cannot be resolved.
function resolvePackageRootFromRequire() {
  const bases = [
    import_node_path.default.join(process.cwd(), "package.json"),
    typeof __filename === "string" && __filename.length > 0 ? __filename : import_node_path.default.join(process.cwd(), "__stream-mdx-worker-thread-resolver__.cjs")
  ];
  for (const base of bases) {
    try {
      const req = (0, import_node_module.createRequire)(base);
      const nodeEntry = req.resolve("@stream-mdx/worker/node");
      return import_node_path.default.resolve(import_node_path.default.dirname(nodeEntry), "..", "..");
    } catch {
      // Unresolvable from this anchor; try the next one.
    }
  }
  return void 0;
}
|
|
178
|
+
// Last-resort module-URL discovery for the worker-thread entry. Temporarily
// installs a structured Error.prepareStackTrace (V8-specific) so the stack
// yields CallSite objects, then scans frame file names. Returns a file://
// href or undefined.
function getModuleUrlFromStack() {
  const previous = Error.prepareStackTrace;
  try {
    Error.prepareStackTrace = (_error, stackTrace2) => stackTrace2;
    const stackTrace = new Error().stack;
    // Non-V8 runtimes keep the stack as a string; give up.
    if (!Array.isArray(stackTrace)) return void 0;
    const files = stackTrace.map((frame) => frame.getFileName()).filter((fileName) => Boolean(fileName));
    // Prefer a frame that clearly belongs to this package's entry file
    // (POSIX and Windows separators both checked).
    const preferred = files.find(
      (fileName) => fileName.includes("/markdown-v2-worker/src/node/worker-thread-entry.") || fileName.includes("/markdown-v2-worker/dist/node/worker-thread-entry.") || fileName.includes("\\markdown-v2-worker\\src\\node\\worker-thread-entry.") || fileName.includes("\\markdown-v2-worker\\dist\\node\\worker-thread-entry.")
    );
    if (preferred) {
      return preferred.startsWith("file://") ? preferred : (0, import_node_url.pathToFileURL)(preferred).href;
    }
    // Otherwise settle for the first absolute-path or file URL frame.
    const firstAbsolute = files.find((fileName) => fileName.startsWith("file://") || import_node_path.default.isAbsolute(fileName));
    if (!firstAbsolute) return void 0;
    return firstAbsolute.startsWith("file://") ? firstAbsolute : (0, import_node_url.pathToFileURL)(firstAbsolute).href;
  } catch {
    return void 0;
  } finally {
    // Always restore the global hook for other stack consumers.
    Error.prepareStackTrace = previous;
  }
}
|
|
@@ -1 +1,2 @@
|
|
|
1
|
-
|
|
1
|
+
|
|
2
|
+
export { }
|
|
@@ -1 +1,2 @@
|
|
|
1
|
-
|
|
1
|
+
|
|
2
|
+
export { }
|