framewebworker 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/render.js ADDED
@@ -0,0 +1,855 @@
1
// src/captions.ts
// Built-in caption style presets. Each entry is a COMPLETE style record:
// mergeStyle() spreads user overrides on top of one of these, so every
// field must be present with a concrete value. Font sizes are authored
// against a 1080px-tall frame and rescaled in renderCaption
// (fontSize / 1080 * canvasHeight).
// NOTE(review): the wordHighlight* fields are carried through but the
// renderer in this file draws whole lines and does not appear to consume
// them — presumably used by the worker-side renderer; confirm.
var STYLE_PRESETS = {
  // Viral-short style: huge impact font, heavy black stroke, uppercase.
  hormozi: {
    preset: "hormozi",
    fontFamily: 'Impact, "Arial Black", sans-serif',
    fontSize: 64,
    fontWeight: "900",
    color: "#FFFFFF",
    strokeColor: "#000000",
    strokeWidth: 4,
    backgroundColor: "transparent",
    backgroundPadding: 0,
    backgroundRadius: 0,
    position: "bottom",
    textAlign: "center",
    lineHeight: 1.1,
    maxWidth: 0.9,
    shadow: true,
    shadowColor: "rgba(0,0,0,0.9)",
    shadowBlur: 6,
    shadowOffsetX: 2,
    shadowOffsetY: 2,
    uppercase: true,
    wordHighlight: true,
    wordHighlightColor: "#FFD700",
    wordHighlightTextColor: "#000000"
  },
  // Clean sans-serif on a rounded semi-transparent chip (the default preset).
  modern: {
    preset: "modern",
    fontFamily: '"Inter", "Helvetica Neue", Arial, sans-serif',
    fontSize: 42,
    fontWeight: "700",
    color: "#FFFFFF",
    strokeColor: "transparent",
    strokeWidth: 0,
    backgroundColor: "rgba(0,0,0,0.65)",
    backgroundPadding: 12,
    backgroundRadius: 8,
    position: "bottom",
    textAlign: "center",
    lineHeight: 1.3,
    maxWidth: 0.85,
    shadow: false,
    shadowColor: "transparent",
    shadowBlur: 0,
    shadowOffsetX: 0,
    shadowOffsetY: 0,
    uppercase: false,
    wordHighlight: false,
    wordHighlightColor: "#3B82F6",
    wordHighlightTextColor: "#FFFFFF"
  },
  // Understated subtitle look: light text with a soft drop shadow, no chip.
  minimal: {
    preset: "minimal",
    fontFamily: '"Helvetica Neue", Arial, sans-serif',
    fontSize: 36,
    fontWeight: "400",
    color: "#FFFFFF",
    strokeColor: "transparent",
    strokeWidth: 0,
    backgroundColor: "transparent",
    backgroundPadding: 0,
    backgroundRadius: 0,
    position: "bottom",
    textAlign: "center",
    lineHeight: 1.4,
    maxWidth: 0.8,
    shadow: true,
    shadowColor: "rgba(0,0,0,0.8)",
    shadowBlur: 8,
    shadowOffsetX: 0,
    shadowOffsetY: 2,
    uppercase: false,
    wordHighlight: false,
    wordHighlightColor: "#FFFFFF",
    wordHighlightTextColor: "#000000"
  },
  // Center-screen yellow headline with hard black stroke and shadow.
  bold: {
    preset: "bold",
    fontFamily: '"Arial Black", "Helvetica Neue", Arial, sans-serif',
    fontSize: 56,
    fontWeight: "900",
    color: "#FFFF00",
    strokeColor: "#000000",
    strokeWidth: 5,
    backgroundColor: "transparent",
    backgroundPadding: 0,
    backgroundRadius: 0,
    position: "center",
    textAlign: "center",
    lineHeight: 1.2,
    maxWidth: 0.88,
    shadow: true,
    shadowColor: "rgba(0,0,0,1)",
    shadowBlur: 4,
    shadowOffsetX: 3,
    shadowOffsetY: 3,
    uppercase: true,
    wordHighlight: false,
    wordHighlightColor: "#FF0000",
    wordHighlightTextColor: "#FFFFFF"
  }
};
104
/**
 * Combine a base caption style with optional overrides.
 * @param {object} base - Complete style record (e.g. a STYLE_PRESETS entry).
 * @param {object} [overrides] - Partial style; its keys win over `base`.
 * @returns {object} A new merged object, or `base` itself when no overrides.
 */
function mergeStyle(base, overrides) {
  if (!overrides) return base;
  return Object.assign({}, base, overrides);
}
107
/**
 * Select the caption segments visible at a given playback time.
 * A segment is active on the half-open interval [startTime, endTime).
 * @param {Array<{startTime: number, endTime: number}>} segments
 * @param {number} currentTime - Time relative to the clip start, seconds.
 * @returns {Array} The active segments, in their original order.
 */
function getActiveCaptions(segments, currentTime) {
  const active = [];
  for (const segment of segments) {
    if (segment.startTime <= currentTime && currentTime < segment.endTime) {
      active.push(segment);
    }
  }
  return active;
}
112
/**
 * Greedy word-wrap: pack words into lines whose measured pixel width stays
 * within maxWidth. A single word wider than maxWidth still gets a line of
 * its own (words are never split mid-word).
 * @param {CanvasRenderingContext2D} ctx - Context whose font is already set.
 * @param {string} text - Text to wrap (split on single spaces).
 * @param {number} maxWidth - Maximum line width in pixels.
 * @returns {string[]} The wrapped lines; empty array for empty text.
 */
function wrapText(ctx, text, maxWidth) {
  const lines = [];
  let line = "";
  for (const word of text.split(" ")) {
    const candidate = line === "" ? word : `${line} ${word}`;
    const width = ctx.measureText(candidate).width;
    if (line !== "" && width > maxWidth) {
      lines.push(line);
      line = word;
    } else {
      line = candidate;
    }
  }
  if (line !== "") lines.push(line);
  return lines;
}
128
/**
 * Draw one caption segment onto the output canvas.
 *
 * The effective font size is authored against a 1080px-tall frame and
 * rescaled to the actual canvas height. Text is word-wrapped to
 * style.maxWidth (a fraction of canvas width) and drawn line by line with
 * an optional background chip, stroke, and drop shadow.
 *
 * BUG FIXES vs. previous version:
 *  1. The shadow used to be cleared unconditionally before fillText, so
 *     styles with shadow but no stroke (e.g. the "minimal" preset) never
 *     rendered a shadow at all. The shadow now applies to the fill when
 *     there is no stroke, and still only to the stroke when there is one
 *     (avoiding a doubled, offset shadow).
 *  2. For position "bottom", baseY used to anchor the FIRST line near the
 *     bottom edge, pushing every subsequent wrapped line off-canvas. The
 *     LAST line is now anchored there and earlier lines stack above it.
 *
 * @param {CanvasRenderingContext2D} ctx
 * @param {{text: string}} segment - Caption segment to draw.
 * @param {object} resolvedStyle - Fully merged style record.
 * @param {number} canvasWidth
 * @param {number} canvasHeight
 */
function renderCaption(ctx, segment, resolvedStyle, canvasWidth, canvasHeight) {
  const style = resolvedStyle;
  const text = style.uppercase ? segment.text.toUpperCase() : segment.text;
  ctx.save();
  // Scale the authored (1080p-relative) font size to this canvas.
  const scaledFontSize = style.fontSize / 1080 * canvasHeight;
  ctx.font = `${style.fontWeight} ${scaledFontSize}px ${style.fontFamily}`;
  ctx.textAlign = style.textAlign;
  ctx.textBaseline = "bottom";
  const maxPx = style.maxWidth * canvasWidth;
  const lines = wrapText(ctx, text, maxPx);
  const lineH = scaledFontSize * style.lineHeight;
  const totalH = lines.length * lineH;
  // baseY is the baseline (bottom edge) of the FIRST line.
  let baseY;
  if (style.position === "top") {
    baseY = scaledFontSize * 1.5;
  } else if (style.position === "center") {
    baseY = canvasHeight / 2 - totalH / 2 + lineH;
  } else {
    // "bottom" (default): anchor the LAST line near the bottom edge so
    // multi-line captions grow upward instead of running off-canvas.
    baseY = canvasHeight - scaledFontSize * 1.2 - (lines.length - 1) * lineH;
  }
  const cx = canvasWidth / 2;
  lines.forEach((line, i) => {
    const y = baseY + i * lineH;
    // Optional background chip behind this line (drawn before the shadow
    // is configured, so the chip itself never casts a shadow).
    if (style.backgroundColor && style.backgroundColor !== "transparent") {
      const metrics = ctx.measureText(line);
      const bw = metrics.width + style.backgroundPadding * 2;
      const bh = lineH + style.backgroundPadding;
      const bx = cx - bw / 2;
      const by = y - lineH;
      ctx.fillStyle = style.backgroundColor;
      if (style.backgroundRadius > 0) {
        roundRect(ctx, bx, by, bw, bh, style.backgroundRadius);
        ctx.fill();
      } else {
        ctx.fillRect(bx, by, bw, bh);
      }
    }
    if (style.shadow) {
      ctx.shadowColor = style.shadowColor;
      ctx.shadowBlur = style.shadowBlur;
      ctx.shadowOffsetX = style.shadowOffsetX;
      ctx.shadowOffsetY = style.shadowOffsetY;
    }
    const hasStroke = style.strokeWidth > 0 && style.strokeColor !== "transparent";
    if (hasStroke) {
      ctx.lineWidth = style.strokeWidth;
      ctx.strokeStyle = style.strokeColor;
      ctx.strokeText(line, cx, y);
      // The stroke already cast the shadow; drop it so the fill does not
      // paint a second, offset copy on top.
      ctx.shadowColor = "transparent";
      ctx.shadowBlur = 0;
    }
    ctx.fillStyle = style.color;
    // When there is no stroke, the shadow (if any) is still active here,
    // so shadow-only presets actually render their shadow.
    ctx.fillText(line, cx, y);
    // Reset shadow state so the next line's background chip is unaffected.
    ctx.shadowColor = "transparent";
    ctx.shadowBlur = 0;
  });
  ctx.restore();
}
183
/**
 * Trace a rounded-rectangle path on the context (no fill/stroke; the
 * caller paints it afterwards).
 * @param {CanvasRenderingContext2D} ctx
 * @param {number} x - Left edge.
 * @param {number} y - Top edge.
 * @param {number} w - Width.
 * @param {number} h - Height.
 * @param {number} r - Corner radius; assumed <= min(w, h) / 2.
 */
function roundRect(ctx, x, y, w, h, r) {
  const right = x + w;
  const bottom = y + h;
  ctx.beginPath();
  ctx.moveTo(x + r, y);
  // Top edge, then clockwise around all four corners.
  ctx.lineTo(right - r, y);
  ctx.quadraticCurveTo(right, y, right, y + r);
  ctx.lineTo(right, bottom - r);
  ctx.quadraticCurveTo(right, bottom, right - r, bottom);
  ctx.lineTo(x + r, bottom);
  ctx.quadraticCurveTo(x, bottom, x, bottom - r);
  ctx.lineTo(x, y + r);
  ctx.quadraticCurveTo(x, y, x + r, y);
  ctx.closePath();
}
196
+
197
// src/compositor.ts
// Output aspect-ratio presets mapped to [widthUnits, heightUnits].
// The [0, 0] sentinel ("original", or any unknown key) tells
// resolveOutputDimensions to keep the source video's own dimensions
// instead of deriving height from the requested width.
var ASPECT_RATIO_MAP = {
  "16:9": [16, 9],
  "9:16": [9, 16],
  "1:1": [1, 1],
  "4:3": [4, 3],
  "3:4": [3, 4],
  original: [0, 0]
};
206
/**
 * Compute the output [width, height] for a clip.
 * "original" (or an unknown ratio key) keeps the source video dimensions
 * unless options.width/options.height override them. Otherwise height is
 * derived from the requested width (default 1280) and the aspect ratio.
 * @returns {[number, number]} [width, height] in pixels.
 */
function resolveOutputDimensions(clip, videoWidth, videoHeight, options) {
  const key = clip.aspectRatio ?? "original";
  const [rw, rh] = ASPECT_RATIO_MAP[key] ?? [0, 0];
  if (rw === 0) {
    return [options.width ?? videoWidth, options.height ?? videoHeight];
  }
  const width = options.width ?? 1280;
  return [width, Math.round(width * (rh / rw))];
}
216
/**
 * Decode a clip into raw RGBA frames on the main thread.
 *
 * Seeks the source video frame-by-frame at the requested fps, draws each
 * frame (with optional crop, or center-crop "cover" scaling) onto a canvas,
 * burns in any active caption segments, and captures the pixels as
 * ImageData.
 *
 * BUG FIX: the object URL created for Blob/File sources used to be revoked
 * only on the success path, leaking it whenever extraction threw or was
 * aborted. All work now runs inside try/finally so the URL is always
 * released.
 *
 * @param {object} clip - { source, startTime?, endTime?, crop?, aspectRatio?, captions? }.
 *   `source` may be a URL string, an HTMLVideoElement, or a Blob/File.
 * @param {object} options - { fps?, width?, height?, onProgress?, signal? }.
 * @returns {Promise<Array<{imageData: ImageData, timestamp: number, width: number, height: number}>>}
 *   Frames with timestamps relative to the clip start.
 * @throws {DOMException} "AbortError" when options.signal aborts mid-render.
 * @throws {Error} when the video fails to load.
 */
async function extractFrames(clip, options) {
  const fps = options.fps ?? 30;
  const onProgress = options.onProgress;
  const signal = options.signal;
  let srcUrl;
  let needsRevoke = false;
  if (typeof clip.source === "string") {
    srcUrl = clip.source;
  } else if (clip.source instanceof HTMLVideoElement) {
    srcUrl = clip.source.src;
  } else {
    // Blob/File source: wrap in an object URL we own (revoked in finally).
    srcUrl = URL.createObjectURL(clip.source);
    needsRevoke = true;
  }
  try {
    const video = document.createElement("video");
    video.muted = true;
    video.crossOrigin = "anonymous";
    video.preload = "auto";
    await new Promise((resolve, reject) => {
      video.onloadedmetadata = () => resolve();
      video.onerror = () => reject(new Error(`Failed to load video: ${srcUrl}`));
      video.src = srcUrl;
    });
    const startTime = clip.startTime ?? 0;
    const endTime = clip.endTime ?? video.duration;
    const clipDuration = endTime - startTime;
    const [outW, outH] = resolveOutputDimensions(
      clip,
      video.videoWidth,
      video.videoHeight,
      options
    );
    const canvas = document.createElement("canvas");
    canvas.width = outW;
    canvas.height = outH;
    // willReadFrequently: we call getImageData once per frame.
    const ctx = canvas.getContext("2d", { willReadFrequently: true });
    const totalFrames = Math.ceil(clipDuration * fps);
    const frames = [];
    const captionSegments = clip.captions?.segments ?? [];
    // Effective caption style = preset defaults + user overrides.
    const baseStylePreset = clip.captions?.style?.preset ?? "modern";
    const baseStyle = mergeStyle(
      STYLE_PRESETS[baseStylePreset],
      clip.captions?.style
    );
    for (let i = 0; i < totalFrames; i++) {
      if (signal?.aborted) throw new DOMException("Render cancelled", "AbortError");
      const t = startTime + i / fps;
      await seekVideo(video, t);
      ctx.clearRect(0, 0, outW, outH);
      drawVideoFrame(ctx, video, clip, outW, outH);
      if (captionSegments.length > 0) {
        // Caption timestamps are relative to the clip start, not the video.
        const active = getActiveCaptions(captionSegments, t - startTime);
        for (const seg of active) {
          const segStyle = mergeStyle(baseStyle, seg.style);
          renderCaption(ctx, seg, segStyle, outW, outH);
        }
      }
      const imageData = ctx.getImageData(0, 0, outW, outH);
      frames.push({ imageData, timestamp: t - startTime, width: outW, height: outH });
      if (onProgress) onProgress(i / totalFrames);
    }
    return frames;
  } finally {
    if (needsRevoke) URL.revokeObjectURL(srcUrl);
  }
}
281
/**
 * Paint the video's current frame into the output context.
 * With clip.crop (a normalized 0..1 source rectangle) the crop region is
 * stretched to fill the output. Otherwise the frame is center-cropped
 * ("cover") so the output aspect ratio is filled without letterboxing.
 */
function drawVideoFrame(ctx, video, clip, outW, outH) {
  const srcW = video.videoWidth;
  const srcH = video.videoHeight;
  if (clip.crop) {
    const crop = clip.crop;
    ctx.drawImage(
      video,
      crop.x * srcW,
      crop.y * srcH,
      crop.width * srcW,
      crop.height * srcH,
      0,
      0,
      outW,
      outH
    );
    return;
  }
  const srcAR = srcW / srcH;
  const dstAR = outW / outH;
  let sx = 0;
  let sy = 0;
  let sw = srcW;
  let sh = srcH;
  if (srcAR > dstAR) {
    // Source is wider than the output: trim the left/right edges.
    sw = srcH * dstAR;
    sx = (srcW - sw) / 2;
  } else if (srcAR < dstAR) {
    // Source is taller than the output: trim the top/bottom edges.
    sh = srcW / dstAR;
    sy = (srcH - sh) / 2;
  }
  ctx.drawImage(video, sx, sy, sw, sh, 0, 0, outW, outH);
}
311
/**
 * Seek the video element to `time` and resolve once the "seeked" event
 * fires. Resolves immediately when already within 1ms of the target,
 * because re-assigning an (almost) identical currentTime may never emit
 * a "seeked" event, which would hang the caller.
 * @param {HTMLVideoElement} video
 * @param {number} time - Target time in seconds.
 * @returns {Promise<void>}
 */
function seekVideo(video, time) {
  const alreadyThere = Math.abs(video.currentTime - time) < 1e-3;
  if (alreadyThere) return Promise.resolve();
  return new Promise((resolve) => {
    const handleSeeked = () => {
      video.removeEventListener("seeked", handleSeeked);
      resolve();
    };
    video.addEventListener("seeked", handleSeeked);
    video.currentTime = time;
  });
}
325
+
326
// src/worker/pool.ts
// Duplicate of ASPECT_RATIO_MAP above: the bundler re-emitted the worker
// pool module's own copy with a "2" suffix. Same semantics — [0, 0]
// ("original"/unknown) means "pass the given dimensions through".
var ASPECT_RATIO_MAP2 = {
  "16:9": [16, 9],
  "9:16": [9, 16],
  "1:1": [1, 1],
  "4:3": [4, 3],
  "3:4": [3, 4],
  original: [0, 0]
};
335
/**
 * Worker-pool variant of resolveOutputDimensions: takes explicit
 * width/height instead of an options object. Unknown or "original"
 * ratios pass the given dimensions straight through; otherwise height is
 * derived from `width` and the clip's aspect ratio.
 * NOTE: videoWidth/videoHeight are accepted for signature parity but are
 * not consulted (matches existing behavior).
 * @returns {[number, number]} [width, height] in pixels.
 */
function resolveOutputDimensions2(clip, videoWidth, videoHeight, width, height) {
  const ratio = ASPECT_RATIO_MAP2[clip.aspectRatio ?? "original"] ?? [0, 0];
  if (ratio[0] === 0) return [width, height];
  return [width, Math.round(width * (ratio[1] / ratio[0]))];
}
343
/**
 * Await completion of a seek to `time` (worker-pool copy of seekVideo).
 * A no-op seek — the element is already within 1ms of the target —
 * resolves without waiting, since re-assigning the same currentTime may
 * not fire a "seeked" event.
 * @returns {Promise<void>}
 */
function seekVideo2(video, time) {
  return new Promise((resolve) => {
    const delta = Math.abs(video.currentTime - time);
    if (delta < 1e-3) {
      resolve();
      return;
    }
    function finish() {
      video.removeEventListener("seeked", finish);
      resolve();
    }
    video.addEventListener("seeked", finish);
    video.currentTime = time;
  });
}
357
/**
 * Render the video's current frame into the output context (worker-pool
 * copy of drawVideoFrame). clip.crop is a normalized 0..1 source rect;
 * without it the frame is center-cropped to cover the output aspect ratio.
 */
function drawVideoFrame2(ctx, video, clip, outW, outH) {
  const vw = video.videoWidth;
  const vh = video.videoHeight;
  if (clip.crop) {
    const { x, y, width, height } = clip.crop;
    ctx.drawImage(video, x * vw, y * vh, width * vw, height * vh, 0, 0, outW, outH);
    return;
  }
  // Pick the centered source rect whose aspect matches the output.
  const videoAR = vw / vh;
  const outAR = outW / outH;
  let src = [0, 0, vw, vh];
  if (videoAR > outAR) {
    const sw = vh * outAR;
    src = [(vw - sw) / 2, 0, sw, vh];
  } else if (videoAR < outAR) {
    const sh = vw / outAR;
    src = [0, (vh - sh) / 2, vw, sh];
  }
  ctx.drawImage(video, src[0], src[1], src[2], src[3], 0, 0, outW, outH);
}
377
/**
 * Fixed-size pool of render workers. Each worker burns captions into the
 * frames streamed to it and sends back transferable pixel buffers.
 *
 * BUG FIXES vs. previous version:
 *  1. processClip left its "message" listener attached to the worker when
 *     an abort threw before the result promise settled; the worker was
 *     then recycled by the pool with the stale listener still attached.
 *     The listener is now always detached in a finally block.
 *  2. terminate() left queued acquire() callers pending forever. Pending
 *     waiters are now rejected with an explicit error.
 */
var WorkerPool = class {
  /** @param {number} maxConcurrency - Number of workers to spawn. */
  constructor(maxConcurrency) {
    this.workers = [];
    this.available = [];
    // Pending acquire() calls: { resolve, reject } pairs, FIFO.
    this.waiters = [];
    for (let i = 0; i < maxConcurrency; i++) {
      const w = new Worker(new URL("./worker/render-worker.js", import.meta.url), { type: "module" });
      this.workers.push(w);
      this.available.push(w);
    }
  }
  /** Check out a worker, waiting if all are busy. */
  acquire() {
    if (this.available.length > 0) return Promise.resolve(this.available.pop());
    return new Promise((resolve, reject) => this.waiters.push({ resolve, reject }));
  }
  /** Return a worker; hands it straight to the oldest waiter if any. */
  release(worker) {
    if (this.waiters.length > 0) {
      this.waiters.shift().resolve(worker);
    } else {
      this.available.push(worker);
    }
  }
  /** Run one clip on the next free worker, always releasing it afterwards. */
  async dispatch(clip, width, height, fps, signal, onProgress) {
    const worker = await this.acquire();
    try {
      return await this.processClip(worker, clip, width, height, fps, signal, onProgress);
    } finally {
      this.release(worker);
    }
  }
  /**
   * Decode a clip on the main thread, stream its frames to `worker` as
   * transferable ImageBitmaps, and rebuild ImageData frames from the
   * buffers the worker returns.
   * @returns {Promise<Array<{imageData, timestamp, width, height}>>}
   * @throws {DOMException} "AbortError" when `signal` aborts.
   */
  async processClip(worker, clip, width, height, fps, signal, onProgress) {
    let srcUrl;
    let needsRevoke = false;
    if (typeof clip.source === "string") {
      srcUrl = clip.source;
    } else if (clip.source instanceof HTMLVideoElement) {
      srcUrl = clip.source.src;
    } else {
      srcUrl = URL.createObjectURL(clip.source);
      needsRevoke = true;
    }
    const video = document.createElement("video");
    video.muted = true;
    video.crossOrigin = "anonymous";
    video.preload = "auto";
    await new Promise((resolve, reject) => {
      video.onloadedmetadata = () => resolve();
      video.onerror = () => reject(new Error(`Failed to load video: ${srcUrl}`));
      video.src = srcUrl;
    });
    const startTime = clip.startTime ?? 0;
    const endTime = clip.endTime ?? video.duration;
    const clipDuration = endTime - startTime;
    const [outW, outH] = resolveOutputDimensions2(clip, video.videoWidth, video.videoHeight, width, height);
    const totalFrames = Math.ceil(clipDuration * fps);
    const canvas = document.createElement("canvas");
    canvas.width = outW;
    canvas.height = outH;
    const ctx = canvas.getContext("2d");
    // Declared out here (not inside the executor) so the finally block can
    // always detach it — see class doc, fix #1.
    let onMessage;
    const resultPromise = new Promise((resolve, reject) => {
      onMessage = (e) => {
        const msg = e.data;
        if (msg.type === "done") {
          resolve(msg.frames);
        } else if (msg.type === "error") {
          reject(new Error(msg.message));
        } else if (msg.type === "progress") {
          onProgress?.(msg.value);
        }
      };
      worker.addEventListener("message", onMessage);
    });
    const initMsg = {
      type: "init",
      meta: { width: outW, height: outH, fps, captions: clip.captions, totalFrames }
    };
    worker.postMessage(initMsg);
    try {
      for (let i = 0; i < totalFrames; i++) {
        if (signal?.aborted) {
          worker.postMessage({ type: "abort" });
          throw new DOMException("Render cancelled", "AbortError");
        }
        const t = startTime + i / fps;
        await seekVideo2(video, t);
        ctx.clearRect(0, 0, outW, outH);
        drawVideoFrame2(ctx, video, clip, outW, outH);
        // Transfer the bitmap (zero-copy) rather than structured-cloning it.
        const bitmap = await createImageBitmap(canvas);
        const frameMsg = { type: "frame", bitmap, timestamp: t - startTime, index: i };
        worker.postMessage(frameMsg, [bitmap]);
      }
      worker.postMessage({ type: "end" });
      const transferableFrames = await resultPromise;
      return transferableFrames.map((f) => ({
        imageData: new ImageData(new Uint8ClampedArray(f.buffer), f.width, f.height),
        timestamp: f.timestamp,
        width: f.width,
        height: f.height
      }));
    } finally {
      worker.removeEventListener("message", onMessage);
      if (needsRevoke) URL.revokeObjectURL(srcUrl);
    }
  }
  /** Kill all workers and reject any callers still waiting in acquire(). */
  terminate() {
    for (const w of this.workers) w.terminate();
    this.workers.length = 0;
    this.available.length = 0;
    const pending = this.waiters.splice(0, this.waiters.length);
    for (const waiter of pending) {
      waiter.reject(new Error("WorkerPool terminated"));
    }
  }
};
489
+
490
// src/stitch.ts
/**
 * Feature-detect everything the parallel (worker-based) render path needs:
 * Worker, OffscreenCanvas, and createImageBitmap.
 * @returns {boolean} true when all three globals exist.
 */
function supportsOffscreenWorkers() {
  const required = [typeof Worker, typeof OffscreenCanvas, typeof createImageBitmap];
  return required.every((t) => t !== "undefined");
}
494
/**
 * Render and encode a list of clips into one video Blob.
 * Takes the worker-pool fast path when the environment supports it and
 * there is more than one clip; otherwise renders sequentially on the
 * main thread.
 * @returns {Promise<{blob: Blob, metrics: object}>}
 */
async function stitchClips(clips, backend, options) {
  const useWorkers = supportsOffscreenWorkers() && clips.length > 1;
  return useWorkers
    ? stitchParallel(clips, backend, options)
    : stitchSequential(clips, backend, options);
}
500
/**
 * Sequential (main-thread) stitch path: for each clip in order, extract
 * frames, encode them, then concatenate the per-clip blobs.
 *
 * Progress model: within one clip, extraction maps to [0, 0.9) of the
 * clip's progress and encoding to [0.9, 1.0]; overall progress is
 * (completedClips + currentClipProgress) / clips.length.
 *
 * @param {Array} clips - Clip records (see extractFrames).
 * @param {object} backend - Encoder backend with encode() and concat().
 * @param {object} options - { fps?, width?, height?, mimeType?, quality?,
 *   encoderOptions?, onProgress?, onComplete?, signal? }.
 * @returns {Promise<{blob: Blob, metrics: object}>} Final video plus timing
 *   metrics (per-clip and aggregate).
 */
async function stitchSequential(clips, backend, options) {
  const fps = options.fps ?? 30;
  const width = options.width ?? 1280;
  const height = options.height ?? 720;
  const { onProgress, onComplete, signal } = options;
  const stitchStart = performance.now();
  // One status record per clip, mutated in place and snapshotted on emit.
  const clipStatuses = clips.map((_, i) => ({
    index: i,
    status: "pending",
    progress: 0
  }));
  const clipMetrics = [];
  const emit = (overall) => {
    // slice() so listeners can't see later in-place mutations... note the
    // records themselves are still shared (shallow copy).
    onProgress?.({ overall, clips: clipStatuses.slice() });
  };
  const blobs = [];
  for (let ci = 0; ci < clips.length; ci++) {
    clipStatuses[ci].status = "rendering";
    emit(ci / clips.length);
    const extractStart = performance.now();
    const frames = await extractFrames(clips[ci], {
      fps,
      width,
      height,
      mimeType: options.mimeType,
      quality: options.quality,
      encoderOptions: options.encoderOptions,
      signal,
      onProgress: (p) => {
        // Extraction accounts for 90% of this clip's progress.
        clipStatuses[ci].progress = p * 0.9;
        emit((ci + p * 0.9) / clips.length);
      }
    });
    const extractionMs = performance.now() - extractStart;
    clipStatuses[ci].status = "encoding";
    const encodeStart = performance.now();
    const blob = await backend.encode(frames, {
      width,
      height,
      fps,
      mimeType: options.mimeType ?? "video/mp4",
      quality: options.quality ?? 0.92,
      encoderOptions: options.encoderOptions,
      signal,
      onProgress: (p) => {
        // Encoding fills the remaining 10%.
        clipStatuses[ci].progress = 0.9 + p * 0.1;
        emit((ci + 0.9 + p * 0.1) / clips.length);
      }
    });
    const encodingMs = performance.now() - encodeStart;
    clipStatuses[ci].status = "done";
    clipStatuses[ci].progress = 1;
    clipMetrics.push({
      clipId: String(ci),
      extractionMs,
      encodingMs,
      totalMs: extractionMs + encodingMs,
      framesExtracted: frames.length
    });
    blobs.push(blob);
  }
  let finalBlob;
  let stitchMs = 0;
  if (blobs.length === 1) {
    // Single clip: no concat phase needed.
    emit(1);
    finalBlob = blobs[0];
  } else {
    const stitchPhaseStart = performance.now();
    finalBlob = await backend.concat(blobs, {
      width,
      height,
      fps,
      mimeType: options.mimeType ?? "video/mp4",
      quality: options.quality ?? 0.92,
      signal,
      onProgress: (p) => emit((clips.length - 1 + p) / clips.length)
    });
    stitchMs = performance.now() - stitchPhaseStart;
  }
  const totalMs = performance.now() - stitchStart;
  const totalFrames = clipMetrics.reduce((s, c) => s + c.framesExtracted, 0);
  const metrics = {
    totalMs,
    extractionMs: clipMetrics.reduce((s, c) => s + c.extractionMs, 0),
    encodingMs: clipMetrics.reduce((s, c) => s + c.encodingMs, 0),
    stitchMs,
    clips: clipMetrics,
    framesPerSecond: totalFrames / (totalMs / 1e3)
  };
  onComplete?.(metrics);
  return { blob: finalBlob, metrics };
}
592
/**
 * Parallel stitch path: frame extraction for all clips runs concurrently
 * on a bounded WorkerPool (min of clip count, hardwareConcurrency, 4),
 * while encoding is SERIALIZED through `encodeChain` because the backend
 * (a single ffmpeg.wasm instance) is not safe for concurrent use.
 *
 * Progress model per clip: extraction maps to [0, 0.85), encoding to
 * [0.85, 1.0]; overall progress is the mean of all clips' progress.
 *
 * NOTE(review): when one encode fails, the rejected `encodeChain` promise
 * propagates into later links whose wrapper promises never settle —
 * Promise.all still rejects via the explicit reject(err), but the
 * rethrown error leaves a rejected chain that may surface as an unhandled
 * rejection. Worth confirming/hardening.
 *
 * @returns {Promise<{blob: Blob, metrics: object}>}
 */
async function stitchParallel(clips, backend, options) {
  const fps = options.fps ?? 30;
  const width = options.width ?? 1280;
  const height = options.height ?? 720;
  const { onProgress, onComplete, signal } = options;
  const stitchStart = performance.now();
  const concurrency = Math.min(
    clips.length,
    typeof navigator !== "undefined" ? navigator.hardwareConcurrency || 2 : 2,
    4
  );
  const clipStatuses = clips.map((_, i) => ({
    index: i,
    status: "pending",
    progress: 0
  }));
  const clipMetrics = new Array(clips.length);
  const emit = () => {
    // Overall progress = mean of per-clip progress values.
    const overall = clipStatuses.reduce((sum, c) => sum + c.progress, 0) / clips.length;
    onProgress?.({ overall, clips: clipStatuses.slice() });
  };
  const pool = new WorkerPool(concurrency);
  const blobs = new Array(clips.length);
  // Serializes backend.encode calls across all clips (shared encoder).
  let encodeChain = Promise.resolve();
  try {
    await Promise.all(
      clips.map(async (clip, ci) => {
        clipStatuses[ci].status = "rendering";
        emit();
        const extractStart = performance.now();
        const frames = await pool.dispatch(
          clip,
          width,
          height,
          fps,
          signal,
          (p) => {
            // Extraction accounts for 85% of this clip's progress.
            clipStatuses[ci].progress = p * 0.85;
            emit();
          }
        );
        const extractionMs = performance.now() - extractStart;
        clipStatuses[ci].status = "encoding";
        clipStatuses[ci].progress = 0.85;
        emit();
        // Queue this clip's encode onto the shared chain and wait for it.
        await new Promise((resolve, reject) => {
          encodeChain = encodeChain.then(async () => {
            const encodeStart = performance.now();
            try {
              blobs[ci] = await backend.encode(frames, {
                width,
                height,
                fps,
                mimeType: options.mimeType ?? "video/mp4",
                quality: options.quality ?? 0.92,
                encoderOptions: options.encoderOptions,
                signal,
                onProgress: (p) => {
                  // Encoding fills the remaining 15%.
                  clipStatuses[ci].progress = 0.85 + p * 0.15;
                  emit();
                }
              });
              const encodingMs = performance.now() - encodeStart;
              clipMetrics[ci] = {
                clipId: String(ci),
                extractionMs,
                encodingMs,
                totalMs: extractionMs + encodingMs,
                framesExtracted: frames.length
              };
              clipStatuses[ci].status = "done";
              clipStatuses[ci].progress = 1;
              emit();
              resolve();
            } catch (err) {
              clipStatuses[ci].status = "error";
              reject(err);
              // Rethrow so the chain itself is marked failed too.
              throw err;
            }
          });
        });
      })
    );
    let finalBlob;
    let stitchMs = 0;
    if (blobs.length === 1) {
      onProgress?.({ overall: 1, clips: clipStatuses.slice() });
      finalBlob = blobs[0];
    } else {
      const stitchPhaseStart = performance.now();
      finalBlob = await backend.concat(blobs, {
        width,
        height,
        fps,
        mimeType: options.mimeType ?? "video/mp4",
        quality: options.quality ?? 0.92,
        signal
      });
      stitchMs = performance.now() - stitchPhaseStart;
    }
    const totalMs = performance.now() - stitchStart;
    const totalFrames = clipMetrics.reduce((s, c) => s + c.framesExtracted, 0);
    const metrics = {
      totalMs,
      extractionMs: clipMetrics.reduce((s, c) => s + c.extractionMs, 0),
      encodingMs: clipMetrics.reduce((s, c) => s + c.encodingMs, 0),
      stitchMs,
      clips: clipMetrics,
      framesPerSecond: totalFrames / (totalMs / 1e3)
    };
    onComplete?.(metrics);
    return { blob: finalBlob, metrics };
  } finally {
    // Always tear the pool down, including on abort/error.
    pool.terminate();
  }
}
708
+
709
// src/backends/ffmpeg.ts
/**
 * Encoder backend built on ffmpeg.wasm (lazily loaded on first use).
 *
 * BUG FIX vs. previous version: intermediate files written into the wasm
 * FS (frame PNGs, per-clip MP4s, concat list, outputs) used to be removed
 * only on the success path, so any throw or abort leaked them — and the
 * wasm FS lives in memory. Cleanup now runs in finally blocks.
 */
var FFmpegBackend = class {
  constructor() {
    this.name = "ffmpeg.wasm";
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    this.ffmpeg = null;       // FFmpeg instance once init() has run
    this.initialized = false;
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    this._fetchFile = null;   // @ffmpeg/util fetchFile, cached by init()
  }
  /**
   * Load @ffmpeg/ffmpeg plus the wasm core (fetched from unpkg via blob
   * URLs). Idempotent: subsequent calls return immediately.
   * @throws {Error} when the optional @ffmpeg/ffmpeg dependency is missing.
   */
  async init() {
    if (this.initialized) return;
    const { FFmpeg } = await import('@ffmpeg/ffmpeg').catch(() => {
      throw new Error(
        "[FrameWorker] @ffmpeg/ffmpeg is required. Install it: npm install @ffmpeg/ffmpeg @ffmpeg/util"
      );
    });
    const { fetchFile, toBlobURL } = await import('@ffmpeg/util');
    const ffmpeg = new FFmpeg();
    const baseURL = "https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm";
    await ffmpeg.load({
      coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, "text/javascript"),
      wasmURL: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, "application/wasm")
    });
    this._fetchFile = fetchFile;
    this.ffmpeg = ffmpeg;
    this.initialized = true;
  }
  /**
   * Encode raw frames into an H.264 MP4 Blob. Frames are written to the
   * wasm FS as numbered PNGs, encoded with libx264, then cleaned up.
   * Progress: frame writing covers [0, 0.8), exec/readFile the rest.
   * @throws {DOMException} "AbortError" when options.signal aborts.
   */
  async encode(frames, options) {
    await this.init();
    const { ffmpeg, _fetchFile: fetchFile } = this;
    const { fps, width, height, onProgress, signal } = options;
    const total = frames.length;
    const frameName = (i) => `frame${String(i).padStart(6, "0")}.png`;
    let written = 0;
    try {
      for (let i = 0; i < total; i++) {
        if (signal?.aborted) throw new DOMException("Render cancelled", "AbortError");
        const frame = frames[i];
        // Rasterize the ImageData to PNG via an OffscreenCanvas.
        const offscreen = new OffscreenCanvas(width, height);
        const ctx = offscreen.getContext("2d");
        ctx.putImageData(frame.imageData, 0, 0);
        const blob = await offscreen.convertToBlob({ type: "image/png" });
        const data2 = await fetchFile(blob);
        await ffmpeg.writeFile(frameName(i), data2);
        written = i + 1;
        onProgress?.(i / total * 0.8);
      }
      await ffmpeg.exec([
        "-framerate",
        String(fps),
        "-i",
        "frame%06d.png",
        "-c:v",
        "libx264",
        "-pix_fmt",
        "yuv420p",      // broadest player compatibility
        "-preset",
        "fast",
        "-crf",
        "23",
        "-movflags",
        "+faststart",   // moov atom up front for streaming playback
        "output.mp4"
      ]);
      onProgress?.(0.95);
      const data = await ffmpeg.readFile("output.mp4");
      onProgress?.(1);
      // Copy out of the wasm heap before the file is deleted in finally.
      return new Blob([data.slice().buffer], { type: "video/mp4" });
    } finally {
      // Remove every frame actually written, plus the output if it exists.
      for (let i = 0; i < written; i++) {
        await ffmpeg.deleteFile(frameName(i)).catch(() => {
        });
      }
      await ffmpeg.deleteFile("output.mp4").catch(() => {
      });
    }
  }
  /**
   * Losslessly concatenate already-encoded MP4 blobs using the ffmpeg
   * concat demuxer with stream copy (no re-encode).
   * NOTE(review): options.signal is accepted by callers but not checked
   * here — concat is a single uninterruptible exec.
   */
  async concat(blobs, options) {
    await this.init();
    const { ffmpeg, _fetchFile: fetchFile } = this;
    const { onProgress } = options;
    const listLines = [];
    let written = 0;
    try {
      for (let i = 0; i < blobs.length; i++) {
        const name = `clip${i}.mp4`;
        const data = await fetchFile(blobs[i]);
        await ffmpeg.writeFile(name, data);
        written = i + 1;
        listLines.push(`file '${name}'`);
        onProgress?.(i / blobs.length * 0.6);
      }
      const encoder = new TextEncoder();
      await ffmpeg.writeFile("concat.txt", encoder.encode(listLines.join("\n")));
      await ffmpeg.exec([
        "-f",
        "concat",
        "-safe",
        "0",        // allow arbitrary file names in the list
        "-i",
        "concat.txt",
        "-c",
        "copy",     // stream copy: no quality loss, near-instant
        "stitched.mp4"
      ]);
      onProgress?.(0.9);
      const out = await ffmpeg.readFile("stitched.mp4");
      onProgress?.(1);
      return new Blob([out.slice().buffer], { type: "video/mp4" });
    } finally {
      // Remove intermediates even when concat fails part-way through.
      for (let i = 0; i < written; i++) {
        await ffmpeg.deleteFile(`clip${i}.mp4`).catch(() => {
        });
      }
      await ffmpeg.deleteFile("concat.txt").catch(() => {
      });
      await ffmpeg.deleteFile("stitched.mp4").catch(() => {
      });
    }
  }
  /** Terminate the wasm worker and reset so init() can run again. */
  async destroy() {
    if (this.ffmpeg) {
      await this.ffmpeg.terminate?.();
      this.ffmpeg = null;
      this.initialized = false;
    }
  }
};
827
/**
 * Factory for the default ffmpeg.wasm encoder backend.
 * @returns {FFmpegBackend} A fresh, not-yet-initialized backend instance.
 */
function createFFmpegBackend() {
  const backend = new FFmpegBackend();
  return backend;
}
830
+
831
// src/render.ts
/**
 * Translate transcript-style segments ({ start, end, captions? }) into the
 * clip records the compositor consumes ({ startTime, endTime, captions? }).
 * All clips share the same source video URL. A clip only carries a
 * captions object when the segment has a non-empty captions array.
 */
function segmentsToClips(videoUrl, segments) {
  const clips = [];
  for (const segment of segments) {
    const hasCaptions = Boolean(segment.captions?.length);
    clips.push({
      source: videoUrl,
      startTime: segment.start,
      endTime: segment.end,
      captions: hasCaptions ? { segments: segment.captions } : void 0
    });
  }
  return clips;
}
840
/**
 * High-level one-shot export: build clip records from segments, spin up
 * the ffmpeg.wasm backend, and stitch everything into a single video Blob.
 * @param {string} videoUrl - Source video URL shared by all segments.
 * @param {Array<{start: number, end: number, captions?: Array}>} segments
 * @param {object} [options] - Passed through to stitchClips.
 * @returns {Promise<{blob: Blob, metrics: object}>}
 */
async function exportClips(videoUrl, segments, options) {
  const backend = createFFmpegBackend();
  const clips = segmentsToClips(videoUrl, segments);
  await backend.init();
  return stitchClips(clips, backend, options ?? {});
}
846
/**
 * Like exportClips, but wraps the resulting Blob in an object URL.
 * The caller owns the URL and should eventually URL.revokeObjectURL() it.
 * @returns {Promise<{url: string, metrics: object}>}
 */
async function exportClipsToUrl(videoUrl, segments, options) {
  const result = await exportClips(videoUrl, segments, options);
  return { url: URL.createObjectURL(result.blob), metrics: result.metrics };
}
850
// Legacy aliases kept for backward compatibility with the older API names.
var render = exportClips;
var renderToUrl = exportClipsToUrl;
852
+
853
// Public API surface: current names plus the legacy render/renderToUrl aliases.
export { exportClips, exportClipsToUrl, render, renderToUrl };
854
//# sourceMappingURL=render.js.map