@stream-mdx/worker 0.3.0 → 0.4.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only, and reflects the changes between package versions as they appear in their respective public registries.
@@ -30,20 +30,54 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
30
30
  // src/node/index.ts
31
31
  var node_exports = {};
32
32
  __export(node_exports, {
33
+ compileMarkdownSnapshot: () => compileMarkdownSnapshot,
34
+ computeSnapshotHash: () => computeSnapshotHash,
35
+ createCompileMarkdownSnapshotPool: () => createCompileMarkdownSnapshotPool,
33
36
  createWorkerThread: () => createWorkerThread,
34
37
  getHostedWorkerBundleUrl: () => getHostedWorkerBundleUrl
35
38
  });
36
39
  module.exports = __toCommonJS(node_exports);
40
+ var import_node_crypto = require("crypto");
41
+ var import_node_fs = require("fs");
42
+ var import_node_fs2 = require("fs");
43
+ var import_node_module = require("module");
37
44
  var import_node_path = __toESM(require("path"), 1);
38
45
  var import_node_url = require("url");
39
46
  var import_node_worker_threads = require("worker_threads");
40
- var import_meta = {};
47
+ var import_core = require("@stream-mdx/core");
41
48
  function getHostedWorkerBundleUrl() {
42
- return new URL("../hosted/markdown-worker.js", getModuleUrl());
49
+ const packageRoot = resolvePackageRootFromRequire();
50
+ if (packageRoot) {
51
+ const resolved = firstExistingPath([
52
+ import_node_path.default.join(packageRoot, "dist/hosted/markdown-worker.js"),
53
+ import_node_path.default.join(packageRoot, "dist/worker.mjs"),
54
+ import_node_path.default.join(packageRoot, "dist/worker.js"),
55
+ import_node_path.default.join(packageRoot, "src/worker.ts")
56
+ ]);
57
+ if (resolved) {
58
+ return (0, import_node_url.pathToFileURL)(resolved);
59
+ }
60
+ }
61
+ const moduleUrl = getModuleUrl();
62
+ const candidates = [
63
+ new URL("../hosted/markdown-worker.js", moduleUrl),
64
+ new URL("../../dist/hosted/markdown-worker.js", moduleUrl),
65
+ new URL("../../../public/workers/markdown-worker.js", moduleUrl),
66
+ new URL("../../../../public/workers/markdown-worker.js", moduleUrl),
67
+ new URL("../../dist/worker.mjs", moduleUrl),
68
+ new URL("../../dist/worker.js", moduleUrl),
69
+ new URL("../worker.mjs", moduleUrl),
70
+ new URL("../worker.js", moduleUrl),
71
+ new URL("../worker.ts", moduleUrl)
72
+ ];
73
+ for (const candidate of candidates) {
74
+ if (urlExists(candidate)) return candidate;
75
+ }
76
+ return candidates[0];
43
77
  }
44
78
  function createWorkerThread(options = {}) {
45
79
  const { workerBundle, workerData, ...workerOptions } = options;
46
- const runnerUrl = new URL("./worker-thread-entry.mjs", getModuleUrl());
80
+ const runnerUrl = resolveWorkerThreadEntryUrl();
47
81
  const bundleUrl = normalizeWorkerBundleUrl(workerBundle) ?? getHostedWorkerBundleUrl();
48
82
  return new import_node_worker_threads.Worker(runnerUrl, {
49
83
  ...workerOptions,
@@ -53,6 +87,37 @@ function createWorkerThread(options = {}) {
53
87
  }
54
88
  });
55
89
  }
90
+ function resolveWorkerThreadEntryUrl() {
91
+ const packageRoot = resolvePackageRootFromRequire();
92
+ if (packageRoot) {
93
+ const resolved = firstExistingPath([
94
+ import_node_path.default.join(packageRoot, "dist/node/worker-thread-entry.mjs"),
95
+ import_node_path.default.join(packageRoot, "dist/node/worker-thread-entry.cjs"),
96
+ import_node_path.default.join(packageRoot, "src/node/worker-thread-entry.ts")
97
+ ]);
98
+ if (resolved) {
99
+ return (0, import_node_url.pathToFileURL)(resolved);
100
+ }
101
+ }
102
+ const moduleUrl = getModuleUrl();
103
+ const distEntry = new URL("./worker-thread-entry.mjs", moduleUrl);
104
+ const sourceEntry = new URL("./worker-thread-entry.ts", moduleUrl);
105
+ if (urlExists(distEntry)) {
106
+ return distEntry;
107
+ }
108
+ if (urlExists(sourceEntry)) {
109
+ return sourceEntry;
110
+ }
111
+ return distEntry;
112
+ }
113
+ function urlExists(url) {
114
+ if (url.protocol !== "file:") return false;
115
+ try {
116
+ return (0, import_node_fs.existsSync)(import_node_path.default.normalize((0, import_node_url.fileURLToPath)(url)));
117
+ } catch {
118
+ return false;
119
+ }
120
+ }
56
121
  function normalizeWorkerBundleUrl(value) {
57
122
  if (!value) return void 0;
58
123
  if (value instanceof URL) return value;
@@ -62,14 +127,328 @@ function normalizeWorkerBundleUrl(value) {
62
127
  return (0, import_node_url.pathToFileURL)(import_node_path.default.resolve(value));
63
128
  }
64
129
  }
130
+ function firstExistingPath(candidates) {
131
+ for (const candidate of candidates) {
132
+ try {
133
+ if ((0, import_node_fs.existsSync)(candidate)) return candidate;
134
+ } catch {
135
+ }
136
+ }
137
+ return void 0;
138
+ }
139
+ function resolvePackageRootFromRequire() {
140
+ const bases = [
141
+ import_node_path.default.join(process.cwd(), "package.json"),
142
+ typeof __filename === "string" && __filename.length > 0 ? __filename : import_node_path.default.join(process.cwd(), "__stream-mdx-node-resolver__.cjs")
143
+ ];
144
+ for (const base of bases) {
145
+ try {
146
+ const req = (0, import_node_module.createRequire)(base);
147
+ const nodeEntry = req.resolve("@stream-mdx/worker/node");
148
+ return import_node_path.default.resolve(import_node_path.default.dirname(nodeEntry), "..", "..");
149
+ } catch {
150
+ }
151
+ }
152
+ return void 0;
153
+ }
65
154
  function getModuleUrl() {
155
+ const fromImportMeta = getImportMetaUrl();
156
+ if (fromImportMeta) {
157
+ return fromImportMeta;
158
+ }
66
159
  if (typeof __filename === "string" && __filename.length > 0) {
67
160
  return (0, import_node_url.pathToFileURL)(__filename).href;
68
161
  }
69
- return import_meta.url;
162
+ const fromStack = getModuleUrlFromStack();
163
+ if (fromStack) {
164
+ return fromStack;
165
+ }
166
+ throw new Error("[stream-mdx] Unable to resolve module URL.");
167
+ }
168
+ function getImportMetaUrl() {
169
+ try {
170
+ const candidate = (0, eval)("import.meta.url");
171
+ return typeof candidate === "string" && candidate.length > 0 ? candidate : void 0;
172
+ } catch {
173
+ return void 0;
174
+ }
175
+ }
176
+ function getModuleUrlFromStack() {
177
+ const previous = Error.prepareStackTrace;
178
+ try {
179
+ Error.prepareStackTrace = (_error, stackTrace2) => stackTrace2;
180
+ const stackTrace = new Error().stack;
181
+ if (!Array.isArray(stackTrace)) return void 0;
182
+ const files = stackTrace.map((frame) => frame.getFileName()).filter((fileName) => Boolean(fileName));
183
+ const preferred = files.find(
184
+ (fileName) => fileName.includes("/markdown-v2-worker/src/node/index.") || fileName.includes("/markdown-v2-worker/dist/node/index.") || fileName.includes("\\markdown-v2-worker\\src\\node\\index.") || fileName.includes("\\markdown-v2-worker\\dist\\node\\index.")
185
+ );
186
+ if (preferred) {
187
+ return preferred.startsWith("file://") ? preferred : (0, import_node_url.pathToFileURL)(preferred).href;
188
+ }
189
+ const firstAbsolute = files.find((fileName) => fileName.startsWith("file://") || import_node_path.default.isAbsolute(fileName));
190
+ if (!firstAbsolute) return void 0;
191
+ return firstAbsolute.startsWith("file://") ? firstAbsolute : (0, import_node_url.pathToFileURL)(firstAbsolute).href;
192
+ } catch {
193
+ return void 0;
194
+ } finally {
195
+ Error.prepareStackTrace = previous;
196
+ }
197
+ }
198
+ function computeSnapshotHash(text, init, hashSalt) {
199
+ return hashCompileInput(text, init, hashSalt);
200
+ }
201
+ async function compileMarkdownSnapshot(options) {
202
+ const {
203
+ text,
204
+ init,
205
+ hashSalt,
206
+ worker: providedWorker,
207
+ workerOptions,
208
+ timeoutMs = 3e4,
209
+ settleMs = 50,
210
+ cache,
211
+ finalize = true
212
+ } = options;
213
+ const hash = hashCompileInput(text, init, hashSalt);
214
+ const cacheKey = cache?.key ?? hash;
215
+ const cachePath = cache ? import_node_path.default.join(cache.dir, `${sanitizeCacheKey(cacheKey)}.json`) : null;
216
+ if (cachePath) {
217
+ const cached = await readSnapshotCache(cachePath, hash);
218
+ if (cached) {
219
+ const snapshot = (0, import_core.createInitialSnapshot)(cached.blocks);
220
+ return {
221
+ blocks: snapshot.blocks,
222
+ snapshot,
223
+ artifact: cached,
224
+ fromCache: true
225
+ };
226
+ }
227
+ }
228
+ const worker = providedWorker ?? createWorkerThread(workerOptions);
229
+ const ownedWorker = !providedWorker;
230
+ return await new Promise((resolve, reject) => {
231
+ let snapshot = null;
232
+ let initialized = false;
233
+ let settled = false;
234
+ let idleTimer = null;
235
+ let timeoutTimer = null;
236
+ const finalizeNow = () => {
237
+ if (settled) return;
238
+ settled = true;
239
+ cleanup();
240
+ const finalSnapshot = snapshot ?? (0, import_core.createInitialSnapshot)();
241
+ const blocks = finalSnapshot.blocks;
242
+ const contentHash = (0, import_node_crypto.createHash)("sha256").update(text).digest("hex");
243
+ const configHash = (0, import_node_crypto.createHash)("sha256").update(
244
+ stableStringify({
245
+ init: init ?? null,
246
+ salt: hashSalt ?? null
247
+ })
248
+ ).digest("hex");
249
+ const artifact = {
250
+ version: 1,
251
+ schemaId: "streammdx.snapshot.v1",
252
+ createdAt: (/* @__PURE__ */ new Date()).toISOString(),
253
+ hash,
254
+ contentHash,
255
+ configHash,
256
+ hashSalt: hashSalt ?? void 0,
257
+ blocks,
258
+ tocHeadings: (() => {
259
+ const root = finalSnapshot.nodes.get(import_core.PATCH_ROOT_ID);
260
+ const maybe = root?.props?.tocHeadings;
261
+ return Array.isArray(maybe) ? maybe : void 0;
262
+ })(),
263
+ init: init ? {
264
+ docPlugins: init.docPlugins,
265
+ mdx: init.mdx,
266
+ prewarmLangs: init.prewarmLangs
267
+ } : void 0
268
+ };
269
+ if (cachePath && !cache?.readOnly) {
270
+ void writeSnapshotCache(cachePath, artifact);
271
+ }
272
+ resolve({
273
+ blocks,
274
+ snapshot: finalSnapshot,
275
+ artifact,
276
+ fromCache: false
277
+ });
278
+ };
279
+ const fail = (error) => {
280
+ if (settled) return;
281
+ settled = true;
282
+ cleanup();
283
+ reject(error);
284
+ };
285
+ const scheduleIdleFinalize = () => {
286
+ if (finalize || !initialized) return;
287
+ if (idleTimer) {
288
+ clearTimeout(idleTimer);
289
+ }
290
+ idleTimer = setTimeout(() => finalizeNow(), settleMs);
291
+ };
292
+ const handleMessage = (message) => {
293
+ switch (message.type) {
294
+ case "INITIALIZED":
295
+ snapshot = (0, import_core.createInitialSnapshot)(message.blocks);
296
+ initialized = true;
297
+ scheduleIdleFinalize();
298
+ break;
299
+ case "PATCH":
300
+ if (snapshot) {
301
+ snapshot.blocks = (0, import_core.applyPatchBatch)(snapshot, message.patches);
302
+ }
303
+ scheduleIdleFinalize();
304
+ break;
305
+ case "FINALIZED":
306
+ if (finalize) {
307
+ finalizeNow();
308
+ }
309
+ break;
310
+ case "RESET":
311
+ fail(new Error(`Worker reset during compile: ${message.reason}`));
312
+ break;
313
+ case "ERROR":
314
+ fail(new Error(`Worker error (${message.phase}): ${message.error.message}`));
315
+ break;
316
+ default:
317
+ break;
318
+ }
319
+ };
320
+ const cleanup = () => {
321
+ if (idleTimer) clearTimeout(idleTimer);
322
+ if (timeoutTimer) clearTimeout(timeoutTimer);
323
+ worker.off("message", handleMessage);
324
+ worker.off("error", fail);
325
+ if (ownedWorker) {
326
+ try {
327
+ worker.terminate();
328
+ } catch {
329
+ }
330
+ }
331
+ };
332
+ worker.on("message", handleMessage);
333
+ worker.on("error", fail);
334
+ timeoutTimer = setTimeout(() => {
335
+ fail(new Error("Worker compile timed out."));
336
+ }, timeoutMs);
337
+ const initMessage = {
338
+ type: "INIT",
339
+ initialContent: text,
340
+ prewarmLangs: init?.prewarmLangs,
341
+ docPlugins: init?.docPlugins,
342
+ mdx: init?.mdx
343
+ };
344
+ worker.postMessage(initMessage);
345
+ if (finalize) {
346
+ worker.postMessage({ type: "FINALIZE" });
347
+ }
348
+ });
349
+ }
350
+ function createCompileMarkdownSnapshotPool(options = {}) {
351
+ const size = Math.max(1, Math.min(8, Math.floor(options.size ?? 2)));
352
+ const workers = new Array(size).fill(null).map(() => createWorkerThread(options.workerOptions));
353
+ const queues = new Array(size).fill(Promise.resolve());
354
+ let rr = 0;
355
+ const maxMemoryEntries = Math.max(0, Math.floor(options.maxMemoryEntries ?? 64));
356
+ const memory = maxMemoryEntries > 0 ? /* @__PURE__ */ new Map() : null;
357
+ const enqueue = (index, fn) => {
358
+ let resolveOuter;
359
+ let rejectOuter;
360
+ const outer = new Promise((resolve, reject) => {
361
+ resolveOuter = resolve;
362
+ rejectOuter = reject;
363
+ });
364
+ queues[index] = queues[index].catch(() => void 0).then(async () => {
365
+ try {
366
+ const value = await fn();
367
+ resolveOuter(value);
368
+ } catch (err) {
369
+ rejectOuter(err);
370
+ }
371
+ }).then(() => void 0);
372
+ return outer;
373
+ };
374
+ const compile = async (compileOptions) => {
375
+ const hash = computeSnapshotHash(compileOptions.text, compileOptions.init, compileOptions.hashSalt);
376
+ if (memory && memory.has(hash)) {
377
+ return memory.get(hash);
378
+ }
379
+ const index = rr++ % workers.length;
380
+ const result = await enqueue(index, async () => {
381
+ return await compileMarkdownSnapshot({
382
+ ...compileOptions,
383
+ worker: workers[index]
384
+ });
385
+ });
386
+ if (memory) {
387
+ memory.set(hash, result);
388
+ if (memory.size > maxMemoryEntries) {
389
+ const firstKey = memory.keys().next().value;
390
+ if (firstKey) memory.delete(firstKey);
391
+ }
392
+ }
393
+ return result;
394
+ };
395
+ const close = async () => {
396
+ for (const worker of workers) {
397
+ try {
398
+ await worker.terminate();
399
+ } catch {
400
+ }
401
+ }
402
+ };
403
+ return { compile, close };
404
+ }
405
+ async function readSnapshotCache(cachePath, hash) {
406
+ try {
407
+ const raw = await import_node_fs2.promises.readFile(cachePath, "utf8");
408
+ const parsed = JSON.parse(raw);
409
+ if (!parsed || parsed.version !== 1 || parsed.hash !== hash || !Array.isArray(parsed.blocks)) {
410
+ return null;
411
+ }
412
+ return parsed;
413
+ } catch {
414
+ return null;
415
+ }
416
+ }
417
+ async function writeSnapshotCache(cachePath, artifact) {
418
+ try {
419
+ await import_node_fs2.promises.mkdir(import_node_path.default.dirname(cachePath), { recursive: true });
420
+ await import_node_fs2.promises.writeFile(cachePath, JSON.stringify(artifact, null, 2), "utf8");
421
+ } catch {
422
+ }
423
+ }
424
+ function sanitizeCacheKey(value) {
425
+ return value.replace(/[^a-z0-9._-]+/gi, "_");
426
+ }
427
+ function hashCompileInput(text, init, hashSalt) {
428
+ const payload = stableStringify({
429
+ text,
430
+ init: init ?? null,
431
+ salt: hashSalt ?? null
432
+ });
433
+ return (0, import_node_crypto.createHash)("sha256").update(payload).digest("hex");
434
+ }
435
+ function stableStringify(value) {
436
+ if (value === null || typeof value !== "object") {
437
+ const primitive = JSON.stringify(value);
438
+ return primitive === void 0 ? "null" : primitive;
439
+ }
440
+ if (Array.isArray(value)) {
441
+ return `[${value.map((item) => stableStringify(item)).join(",")}]`;
442
+ }
443
+ const entries = Object.entries(value).sort(([a], [b]) => a.localeCompare(b));
444
+ const body = entries.map(([key, val]) => `${JSON.stringify(key)}:${stableStringify(val)}`).join(",");
445
+ return `{${body}}`;
70
446
  }
71
447
  // Annotate the CommonJS export names for ESM import in node:
72
448
  0 && (module.exports = {
449
+ compileMarkdownSnapshot,
450
+ computeSnapshotHash,
451
+ createCompileMarkdownSnapshotPool,
73
452
  createWorkerThread,
74
453
  getHostedWorkerBundleUrl
75
454
  });
@@ -1,4 +1,5 @@
1
1
  import { WorkerOptions, Worker } from 'node:worker_threads';
2
+ import { Block, TocHeading, WorkerIn, DocumentSnapshot } from '@stream-mdx/core';
2
3
 
3
4
  interface CreateWorkerThreadOptions extends Omit<WorkerOptions, "type" | "workerData"> {
4
5
  /**
@@ -25,5 +26,71 @@ declare function getHostedWorkerBundleUrl(): URL;
25
26
  * so the same hosted bundle used in browsers can run under Node.
26
27
  */
27
28
  declare function createWorkerThread(options?: CreateWorkerThreadOptions): Worker;
29
+ type WorkerInitMessage = Extract<WorkerIn, {
30
+ type: "INIT";
31
+ }>;
32
+ interface SnapshotArtifactV1 {
33
+ version: 1;
34
+ schemaId: "streammdx.snapshot.v1";
35
+ createdAt: string;
36
+ hash: string;
37
+ contentHash: string;
38
+ configHash: string;
39
+ hashSalt?: string;
40
+ blocks: Block[];
41
+ tocHeadings?: TocHeading[];
42
+ init?: {
43
+ docPlugins?: WorkerInitMessage["docPlugins"];
44
+ mdx?: WorkerInitMessage["mdx"];
45
+ prewarmLangs?: string[];
46
+ };
47
+ }
48
+ interface CompileMarkdownSnapshotOptions {
49
+ text: string;
50
+ init?: Omit<WorkerInitMessage, "type" | "initialContent">;
51
+ /**
52
+ * Optional salt mixed into the snapshot hash and cache key.
53
+ *
54
+ * This exists so callers can invalidate on renderer/compiler changes (not just
55
+ * input text + init), while keeping deterministic outputs for a given salt.
56
+ */
57
+ hashSalt?: string;
58
+ worker?: Worker;
59
+ workerOptions?: CreateWorkerThreadOptions;
60
+ timeoutMs?: number;
61
+ settleMs?: number;
62
+ cache?: {
63
+ dir: string;
64
+ key?: string;
65
+ readOnly?: boolean;
66
+ };
67
+ finalize?: boolean;
68
+ }
69
+ interface CompileMarkdownSnapshotResult {
70
+ blocks: Block[];
71
+ snapshot: DocumentSnapshot;
72
+ artifact: SnapshotArtifactV1;
73
+ fromCache: boolean;
74
+ }
75
+ declare function computeSnapshotHash(text: string, init?: Omit<WorkerInitMessage, "type" | "initialContent">, hashSalt?: string): string;
76
+ declare function compileMarkdownSnapshot(options: CompileMarkdownSnapshotOptions): Promise<CompileMarkdownSnapshotResult>;
77
+ interface CompileMarkdownSnapshotPoolOptions {
78
+ /**
79
+ * Number of worker_threads to keep warm.
80
+ * Each worker is single-flight (requests are queued per worker).
81
+ */
82
+ size?: number;
83
+ workerOptions?: CreateWorkerThreadOptions;
84
+ /**
85
+ * In-memory cache entries keyed by the snapshot hash.
86
+ * Set to 0 to disable.
87
+ */
88
+ maxMemoryEntries?: number;
89
+ }
90
+ interface CompileMarkdownSnapshotPool {
91
+ compile(options: Omit<CompileMarkdownSnapshotOptions, "worker">): Promise<CompileMarkdownSnapshotResult>;
92
+ close(): Promise<void>;
93
+ }
94
+ declare function createCompileMarkdownSnapshotPool(options?: CompileMarkdownSnapshotPoolOptions): CompileMarkdownSnapshotPool;
28
95
 
29
- export { type CreateWorkerThreadOptions, createWorkerThread, getHostedWorkerBundleUrl };
96
+ export { type CompileMarkdownSnapshotOptions, type CompileMarkdownSnapshotPool, type CompileMarkdownSnapshotPoolOptions, type CompileMarkdownSnapshotResult, type CreateWorkerThreadOptions, type SnapshotArtifactV1, compileMarkdownSnapshot, computeSnapshotHash, createCompileMarkdownSnapshotPool, createWorkerThread, getHostedWorkerBundleUrl };
@@ -1,4 +1,5 @@
1
1
  import { WorkerOptions, Worker } from 'node:worker_threads';
2
+ import { Block, TocHeading, WorkerIn, DocumentSnapshot } from '@stream-mdx/core';
2
3
 
3
4
  interface CreateWorkerThreadOptions extends Omit<WorkerOptions, "type" | "workerData"> {
4
5
  /**
@@ -25,5 +26,71 @@ declare function getHostedWorkerBundleUrl(): URL;
25
26
  * so the same hosted bundle used in browsers can run under Node.
26
27
  */
27
28
  declare function createWorkerThread(options?: CreateWorkerThreadOptions): Worker;
29
+ type WorkerInitMessage = Extract<WorkerIn, {
30
+ type: "INIT";
31
+ }>;
32
+ interface SnapshotArtifactV1 {
33
+ version: 1;
34
+ schemaId: "streammdx.snapshot.v1";
35
+ createdAt: string;
36
+ hash: string;
37
+ contentHash: string;
38
+ configHash: string;
39
+ hashSalt?: string;
40
+ blocks: Block[];
41
+ tocHeadings?: TocHeading[];
42
+ init?: {
43
+ docPlugins?: WorkerInitMessage["docPlugins"];
44
+ mdx?: WorkerInitMessage["mdx"];
45
+ prewarmLangs?: string[];
46
+ };
47
+ }
48
+ interface CompileMarkdownSnapshotOptions {
49
+ text: string;
50
+ init?: Omit<WorkerInitMessage, "type" | "initialContent">;
51
+ /**
52
+ * Optional salt mixed into the snapshot hash and cache key.
53
+ *
54
+ * This exists so callers can invalidate on renderer/compiler changes (not just
55
+ * input text + init), while keeping deterministic outputs for a given salt.
56
+ */
57
+ hashSalt?: string;
58
+ worker?: Worker;
59
+ workerOptions?: CreateWorkerThreadOptions;
60
+ timeoutMs?: number;
61
+ settleMs?: number;
62
+ cache?: {
63
+ dir: string;
64
+ key?: string;
65
+ readOnly?: boolean;
66
+ };
67
+ finalize?: boolean;
68
+ }
69
+ interface CompileMarkdownSnapshotResult {
70
+ blocks: Block[];
71
+ snapshot: DocumentSnapshot;
72
+ artifact: SnapshotArtifactV1;
73
+ fromCache: boolean;
74
+ }
75
+ declare function computeSnapshotHash(text: string, init?: Omit<WorkerInitMessage, "type" | "initialContent">, hashSalt?: string): string;
76
+ declare function compileMarkdownSnapshot(options: CompileMarkdownSnapshotOptions): Promise<CompileMarkdownSnapshotResult>;
77
+ interface CompileMarkdownSnapshotPoolOptions {
78
+ /**
79
+ * Number of worker_threads to keep warm.
80
+ * Each worker is single-flight (requests are queued per worker).
81
+ */
82
+ size?: number;
83
+ workerOptions?: CreateWorkerThreadOptions;
84
+ /**
85
+ * In-memory cache entries keyed by the snapshot hash.
86
+ * Set to 0 to disable.
87
+ */
88
+ maxMemoryEntries?: number;
89
+ }
90
+ interface CompileMarkdownSnapshotPool {
91
+ compile(options: Omit<CompileMarkdownSnapshotOptions, "worker">): Promise<CompileMarkdownSnapshotResult>;
92
+ close(): Promise<void>;
93
+ }
94
+ declare function createCompileMarkdownSnapshotPool(options?: CompileMarkdownSnapshotPoolOptions): CompileMarkdownSnapshotPool;
28
95
 
29
- export { type CreateWorkerThreadOptions, createWorkerThread, getHostedWorkerBundleUrl };
96
+ export { type CompileMarkdownSnapshotOptions, type CompileMarkdownSnapshotPool, type CompileMarkdownSnapshotPoolOptions, type CompileMarkdownSnapshotResult, type CreateWorkerThreadOptions, type SnapshotArtifactV1, compileMarkdownSnapshot, computeSnapshotHash, createCompileMarkdownSnapshotPool, createWorkerThread, getHostedWorkerBundleUrl };