@remotion/web-renderer 4.0.400 → 4.0.402

This diff shows the changes between publicly released versions of this package, as they appear in the supported public registries. It is provided for informational purposes only.
@@ -37,13 +37,7 @@ var __callDispose = (stack, error, hasError) => {
 };
 
 // src/render-media-on-web.tsx
-import {
-  AudioSampleSource,
-  BufferTarget,
-  Output,
-  StreamTarget,
-  VideoSampleSource
-} from "mediabunny";
+import { BufferTarget, StreamTarget } from "mediabunny";
 import { Internals as Internals7 } from "remotion";
 
 // src/add-sample.ts
@@ -404,6 +398,11 @@ var getRealFrameRange = (durationInFrames, frameRange) => {
   return frameRange;
 };
 
+// src/get-audio-sample-source.ts
+import {
+  AudioSampleSource
+} from "mediabunny";
+
 // src/get-audio-encoding-config.ts
 import {
   canEncodeAudio,
@@ -427,6 +426,23 @@ var getDefaultAudioEncodingConfig = async () => {
   return null;
 };
 
+// src/get-audio-sample-source.ts
+var addAudioSampleSource = async ({
+  muted,
+  output
+}) => {
+  if (muted) {
+    return null;
+  }
+  const defaultAudioEncodingConfig = await getDefaultAudioEncodingConfig();
+  if (!defaultAudioEncodingConfig) {
+    throw new Error("No default audio encoding config found");
+  }
+  const audioSampleSource = new AudioSampleSource(defaultAudioEncodingConfig);
+  output.addAudioTrack(audioSampleSource);
+  return { audioSampleSource, [Symbol.dispose]: () => audioSampleSource.close() };
+};
+
 // src/internal-state.ts
 var makeInternalState = () => {
   let drawnPrecomposedPixels = 0;
@@ -473,6 +489,30 @@ var makeInternalState = () => {
   };
 };
 
+// src/mediabunny-cleanups.ts
+import { Output, VideoSampleSource } from "mediabunny";
+var makeOutputWithCleanup = (options) => {
+  const output = new Output(options);
+  return {
+    output,
+    [Symbol.dispose]: () => {
+      if (output.state === "finalized" || output.state === "canceled") {
+        return;
+      }
+      output.cancel();
+    }
+  };
+};
+var makeVideoSampleSourceCleanup = (encodingConfig) => {
+  const videoSampleSource = new VideoSampleSource(encodingConfig);
+  return {
+    videoSampleSource,
+    [Symbol.dispose]: () => {
+      videoSampleSource.close();
+    }
+  };
+};
+
 // src/mediabunny-mappings.ts
 import {
   Mp4OutputFormat,
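
The new src/mediabunny-cleanups.ts helpers follow the TC39 explicit-resource-management pattern: each returns the underlying mediabunny object together with a [Symbol.dispose] method, so a `using` declaration can guarantee that the Output is cancelled (unless it already finished) and the VideoSampleSource is closed when the scope exits, even on an exception. A minimal, self-contained sketch of the same wrapper pattern — with a hypothetical Resource class, not Remotion's actual code:

// TypeScript 5.2+, "lib": ["esnext.disposable"] (or a Symbol.dispose polyfill).
// Resource is hypothetical; it stands in for an Output or VideoSampleSource.
class Resource {
  closed = false;
  close() {
    this.closed = true;
  }
}

const makeResourceWithCleanup = () => {
  const resource = new Resource();
  return {
    resource,
    // Runs automatically when the `using` binding below goes out of scope.
    [Symbol.dispose]: () => {
      if (!resource.closed) {
        resource.close();
      }
    },
  };
};

const work = () => {
  using wrapper = makeResourceWithCleanup();
  // ...use wrapper.resource; whether we return or throw, dispose runs here.
};

Returning a wrapper object rather than subclassing keeps the mediabunny types untouched while still making them consumable with `using`.
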
@@ -3406,13 +3446,12 @@ var internalRenderMediaOnWeb = async ({
   licenseKey,
   muted
 }) => {
-  let __stack = [];
+  let __stack2 = [];
   try {
     const outputTarget = userDesiredOutputTarget === null ? await canUseWebFsWriter() ? "web-fs" : "arraybuffer" : userDesiredOutputTarget;
     if (outputTarget === "web-fs") {
      await cleanupStaleOpfsFiles();
    }
-    const cleanupFns = [];
     const format = containerToMediabunnyContainer(container);
     if (codec && !format.getSupportedCodecs().includes(codecToMediabunnyCodec(codec))) {
       return Promise.reject(new Error(`Codec ${codec} is not supported for container ${container}`));
@@ -3432,7 +3471,7 @@ var internalRenderMediaOnWeb = async ({
     if (signal?.aborted) {
       return Promise.reject(new Error("renderMediaOnWeb() was cancelled"));
     }
-    const scaffold = __using(__stack, await createScaffold({
+    const scaffold = __using(__stack2, await createScaffold({
       width: resolved.width,
       height: resolved.height,
       fps: resolved.fps,
@@ -3451,73 +3490,20 @@ var internalRenderMediaOnWeb = async ({
       defaultOutName: resolved.defaultOutName
     }), 0);
     const { delayRenderScope, div, timeUpdater, collectAssets } = scaffold;
-    const internalState = __using(__stack, makeInternalState(), 0);
+    const internalState = __using(__stack2, makeInternalState(), 0);
     const artifactsHandler = handleArtifacts();
     const webFsTarget = outputTarget === "web-fs" ? await createWebFsTarget() : null;
     const target = webFsTarget ? new StreamTarget(webFsTarget.stream) : new BufferTarget;
-    const output = new Output({
+    const outputWithCleanup = __using(__stack2, makeOutputWithCleanup({
       format,
       target
-    });
+    }), 0);
     try {
-      if (signal?.aborted) {
-        throw new Error("renderMediaOnWeb() was cancelled");
-      }
-      await waitForReady({
-        timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
-        scope: delayRenderScope,
-        signal,
-        apiName: "renderMediaOnWeb",
-        internalState
-      });
-      if (signal?.aborted) {
-        throw new Error("renderMediaOnWeb() was cancelled");
-      }
-      cleanupFns.push(() => {
-        if (output.state === "finalized" || output.state === "canceled") {
-          return;
-        }
-        output.cancel();
-      });
-      const videoSampleSource = new VideoSampleSource({
-        codec: codecToMediabunnyCodec(codec),
-        bitrate: typeof videoBitrate === "number" ? videoBitrate : getQualityForWebRendererQuality(videoBitrate),
-        sizeChangeBehavior: "deny",
-        hardwareAcceleration,
-        latencyMode: "quality",
-        keyFrameInterval: keyframeIntervalInSeconds,
-        alpha: transparent ? "keep" : "discard"
-      });
-      cleanupFns.push(() => {
-        videoSampleSource.close();
-      });
-      output.addVideoTrack(videoSampleSource);
-      let audioSampleSource = null;
-      if (!muted) {
-        const defaultAudioEncodingConfig = await getDefaultAudioEncodingConfig();
-        if (!defaultAudioEncodingConfig) {
-          return Promise.reject(new Error("No default audio encoding config found"));
-        }
-        audioSampleSource = new AudioSampleSource(defaultAudioEncodingConfig);
-        cleanupFns.push(() => {
-          audioSampleSource?.close();
-        });
-        output.addAudioTrack(audioSampleSource);
-      }
-      await output.start();
-      if (signal?.aborted) {
-        throw new Error("renderMediaOnWeb() was cancelled");
-      }
-      const progress = {
-        renderedFrames: 0,
-        encodedFrames: 0
-      };
-      const throttledOnProgress = createThrottledProgressCallback(onProgress);
-      for (let frame = realFrameRange[0];frame <= realFrameRange[1]; frame++) {
+      let __stack = [];
+      try {
         if (signal?.aborted) {
           throw new Error("renderMediaOnWeb() was cancelled");
         }
-        timeUpdater.current?.update(frame);
         await waitForReady({
           timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
           scope: delayRenderScope,
@@ -3528,102 +3514,144 @@ var internalRenderMediaOnWeb = async ({
         if (signal?.aborted) {
           throw new Error("renderMediaOnWeb() was cancelled");
         }
-        const createFrameStart = performance.now();
-        const imageData = await createFrame({
-          div,
-          width: resolved.width,
-          height: resolved.height,
-          logLevel,
-          internalState
-        });
-        internalState.addCreateFrameTime(performance.now() - createFrameStart);
+        const videoSampleSource = __using(__stack, makeVideoSampleSourceCleanup({
+          codec: codecToMediabunnyCodec(codec),
+          bitrate: typeof videoBitrate === "number" ? videoBitrate : getQualityForWebRendererQuality(videoBitrate),
+          sizeChangeBehavior: "deny",
+          hardwareAcceleration,
+          latencyMode: "quality",
+          keyFrameInterval: keyframeIntervalInSeconds,
+          alpha: transparent ? "keep" : "discard"
+        }), 0);
+        outputWithCleanup.output.addVideoTrack(videoSampleSource.videoSampleSource);
+        const audioSampleSource = __using(__stack, await addAudioSampleSource({
+          muted,
+          output: outputWithCleanup.output
+        }), 0);
+        await outputWithCleanup.output.start();
         if (signal?.aborted) {
           throw new Error("renderMediaOnWeb() was cancelled");
         }
-        const timestamp = Math.round((frame - realFrameRange[0]) / resolved.fps * 1e6);
-        const videoFrame = new VideoFrame(imageData, {
-          timestamp
-        });
-        progress.renderedFrames++;
-        throttledOnProgress?.({ ...progress });
-        let frameToEncode = videoFrame;
-        if (onFrame) {
-          const returnedFrame = await onFrame(videoFrame);
+        const progress = {
+          renderedFrames: 0,
+          encodedFrames: 0
+        };
+        const throttledOnProgress = createThrottledProgressCallback(onProgress);
+        for (let frame = realFrameRange[0];frame <= realFrameRange[1]; frame++) {
          if (signal?.aborted) {
            throw new Error("renderMediaOnWeb() was cancelled");
          }
-          frameToEncode = validateVideoFrame({
-            originalFrame: videoFrame,
-            returnedFrame,
-            expectedWidth: resolved.width,
-            expectedHeight: resolved.height,
-            expectedTimestamp: timestamp
+          timeUpdater.current?.update(frame);
+          await waitForReady({
+            timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
+            scope: delayRenderScope,
+            signal,
+            apiName: "renderMediaOnWeb",
+            internalState
           });
-        }
-        const audioCombineStart = performance.now();
-        const assets = collectAssets.current.collectAssets();
-        if (onArtifact) {
-          await artifactsHandler.handle({
-            imageData,
-            frame,
-            assets,
-            onArtifact
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
+          const createFrameStart = performance.now();
+          const imageData = await createFrame({
+            div,
+            width: resolved.width,
+            height: resolved.height,
+            logLevel,
+            internalState
+          });
+          internalState.addCreateFrameTime(performance.now() - createFrameStart);
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
+          const timestamp = Math.round((frame - realFrameRange[0]) / resolved.fps * 1e6);
+          const videoFrame = new VideoFrame(imageData, {
+            timestamp
           });
+          progress.renderedFrames++;
+          throttledOnProgress?.({ ...progress });
+          let frameToEncode = videoFrame;
+          if (onFrame) {
+            const returnedFrame = await onFrame(videoFrame);
+            if (signal?.aborted) {
+              throw new Error("renderMediaOnWeb() was cancelled");
+            }
+            frameToEncode = validateVideoFrame({
+              originalFrame: videoFrame,
+              returnedFrame,
+              expectedWidth: resolved.width,
+              expectedHeight: resolved.height,
+              expectedTimestamp: timestamp
+            });
+          }
+          const audioCombineStart = performance.now();
+          const assets = collectAssets.current.collectAssets();
+          if (onArtifact) {
+            await artifactsHandler.handle({
+              imageData,
+              frame,
+              assets,
+              onArtifact
+            });
+          }
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
+          const audio = muted ? null : onlyInlineAudio({ assets, fps: resolved.fps, frame });
+          internalState.addAudioMixingTime(performance.now() - audioCombineStart);
+          const addSampleStart = performance.now();
+          await Promise.all([
+            addVideoSampleAndCloseFrame(frameToEncode, videoSampleSource.videoSampleSource),
+            audio && audioSampleSource ? addAudioSample(audio, audioSampleSource.audioSampleSource) : Promise.resolve()
+          ]);
+          internalState.addAddSampleTime(performance.now() - addSampleStart);
+          progress.encodedFrames++;
+          throttledOnProgress?.({ ...progress });
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
        }
-        if (signal?.aborted) {
-          throw new Error("renderMediaOnWeb() was cancelled");
+        onProgress?.({ ...progress });
+        videoSampleSource.videoSampleSource.close();
+        audioSampleSource?.audioSampleSource.close();
+        await outputWithCleanup.output.finalize();
+        Internals7.Log.verbose({ logLevel, tag: "web-renderer" }, `Render timings: waitForReady=${internalState.getWaitForReadyTime().toFixed(2)}ms, createFrame=${internalState.getCreateFrameTime().toFixed(2)}ms, addSample=${internalState.getAddSampleTime().toFixed(2)}ms, audioMixing=${internalState.getAudioMixingTime().toFixed(2)}ms`);
+        if (webFsTarget) {
+          sendUsageEvent({
+            licenseKey: licenseKey ?? null,
+            succeeded: true,
+            apiName: "renderMediaOnWeb"
+          });
+          await webFsTarget.close();
+          return {
+            getBlob: () => {
+              return webFsTarget.getBlob();
+            },
+            internalState
+          };
        }
-        const audio = muted ? null : onlyInlineAudio({ assets, fps: resolved.fps, frame });
-        internalState.addAudioMixingTime(performance.now() - audioCombineStart);
-        const addSampleStart = performance.now();
-        await Promise.all([
-          addVideoSampleAndCloseFrame(frameToEncode, videoSampleSource),
-          audio && audioSampleSource ? addAudioSample(audio, audioSampleSource) : Promise.resolve()
-        ]);
-        internalState.addAddSampleTime(performance.now() - addSampleStart);
-        progress.encodedFrames++;
-        throttledOnProgress?.({ ...progress });
-        if (signal?.aborted) {
-          throw new Error("renderMediaOnWeb() was cancelled");
+        if (!(target instanceof BufferTarget)) {
+          throw new Error("Expected target to be a BufferTarget");
        }
-      }
-      onProgress?.({ ...progress });
-      videoSampleSource.close();
-      audioSampleSource?.close();
-      await output.finalize();
-      Internals7.Log.verbose({ logLevel, tag: "web-renderer" }, `Render timings: waitForReady=${internalState.getWaitForReadyTime().toFixed(2)}ms, createFrame=${internalState.getCreateFrameTime().toFixed(2)}ms, addSample=${internalState.getAddSampleTime().toFixed(2)}ms, audioMixing=${internalState.getAudioMixingTime().toFixed(2)}ms`);
-      const mimeType = getMimeType(container);
-      if (webFsTarget) {
        sendUsageEvent({
          licenseKey: licenseKey ?? null,
          succeeded: true,
          apiName: "renderMediaOnWeb"
        });
-        await webFsTarget.close();
        return {
          getBlob: () => {
-            return webFsTarget.getBlob();
+            if (!target.buffer) {
+              throw new Error("The resulting buffer is empty");
+            }
+            return Promise.resolve(new Blob([target.buffer], { type: getMimeType(container) }));
          },
          internalState
        };
+      } catch (_catch) {
+        var _err = _catch, _hasErr = 1;
+      } finally {
+        __callDispose(__stack, _err, _hasErr);
      }
-      if (!(target instanceof BufferTarget)) {
-        throw new Error("Expected target to be a BufferTarget");
-      }
-      sendUsageEvent({
-        licenseKey: licenseKey ?? null,
-        succeeded: true,
-        apiName: "renderMediaOnWeb"
-      });
-      return {
-        getBlob: () => {
-          if (!target.buffer) {
-            throw new Error("The resulting buffer is empty");
-          }
-          return Promise.resolve(new Blob([target.buffer], { type: mimeType }));
-        },
-        internalState
-      };
    } catch (err) {
      if (!signal?.aborted) {
        sendUsageEvent({
@@ -3635,13 +3663,11 @@ var internalRenderMediaOnWeb = async ({
        });
      }
      throw err;
-    } finally {
-      cleanupFns.forEach((fn) => fn());
    }
-  } catch (_catch) {
-    var _err = _catch, _hasErr = 1;
+  } catch (_catch2) {
+    var _err2 = _catch2, _hasErr2 = 1;
  } finally {
-    __callDispose(__stack, _err, _hasErr);
+    __callDispose(__stack2, _err2, _hasErr2);
  }
};
var renderMediaOnWeb = (options) => {
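
Throughout these hunks, __stack/__stack2, __using and __callDispose are the bundler's lowering of `using` declarations to plain JavaScript: the change replaces the hand-rolled cleanupFns array with two nested disposal scopes, an outer one (__stack2) for the scaffold, internal state and output wrapper, and an inner one (__stack) for the per-encode sample sources. A small runnable sketch of how such nested scopes unwind — illustrative names, not the Remotion source:

// TypeScript 5.2+, "lib": ["esnext.disposable"]. Demonstrates the disposal
// order that the compiled __stack/__stack2 pattern above guarantees.
const log: string[] = [];

const track = (name: string): Disposable => ({
  [Symbol.dispose]: () => log.push(`disposed ${name}`),
});

const run = () => {
  using output = track("output");              // outer scope, like __stack2
  {
    using sampleSource = track("sampleSource"); // inner scope, like __stack
    log.push("encoding");
  } // inner resources are disposed first, even if the block throws
  log.push("finalizing");
};

run();
console.log(log);
// ["encoding", "disposed sampleSource", "finalizing", "disposed output"]
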
@@ -0,0 +1,8 @@
+import { AudioSampleSource, type BufferTarget, type Output, type OutputFormat, type StreamTarget } from 'mediabunny';
+export declare const addAudioSampleSource: ({ muted, output, }: {
+    muted: boolean;
+    output: Output<OutputFormat, BufferTarget | StreamTarget>;
+}) => Promise<{
+    audioSampleSource: AudioSampleSource;
+    [Symbol.dispose]: () => void;
+} | null>;
@@ -0,0 +1,10 @@
+import type { OutputFormat, OutputOptions, Target, VideoEncodingConfig } from 'mediabunny';
+import { Output, VideoSampleSource } from 'mediabunny';
+export declare const makeOutputWithCleanup: <T extends OutputFormat, U extends Target>(options: OutputOptions<T, U>) => {
+    output: Output<T, U>;
+    [Symbol.dispose]: () => void;
+};
+export declare const makeVideoSampleSourceCleanup: (encodingConfig: VideoEncodingConfig) => {
+    videoSampleSource: VideoSampleSource;
+    [Symbol.dispose]: () => void;
+};
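
These two added declaration files correspond to the new src/get-audio-sample-source.ts and src/mediabunny-cleanups.ts modules seen in the bundle diff. Based only on the signatures above, consumption looks roughly like the following — a hedged sketch: these are internal helpers, and the relative import paths and option types are assumptions:

import { BufferTarget, Mp4OutputFormat } from 'mediabunny';
import { makeOutputWithCleanup } from './mediabunny-cleanups';
import { addAudioSampleSource } from './get-audio-sample-source';

const encode = async (muted: boolean) => {
  // Disposal cancels the Output unless it already reached
  // the "finalized" or "canceled" state.
  using outputWithCleanup = makeOutputWithCleanup({
    format: new Mp4OutputFormat(),
    target: new BufferTarget(),
  });
  // Resolves to null when muted; `using` accepts null and skips disposal.
  using audio = await addAudioSampleSource({
    muted,
    output: outputWithCleanup.output,
  });
  await outputWithCleanup.output.start();
  // ...add video samples, and audio samples via audio?.audioSampleSource...
  await outputWithCleanup.output.finalize();
};
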
package/package.json CHANGED
@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/web-renderer"
   },
   "name": "@remotion/web-renderer",
-  "version": "4.0.400",
+  "version": "4.0.402",
   "main": "dist/index.js",
   "type": "module",
   "sideEffects": false,
@@ -18,14 +18,14 @@
   "author": "Remotion <jonny@remotion.dev>",
   "license": "UNLICENSED",
   "dependencies": {
-    "@remotion/licensing": "4.0.400",
-    "remotion": "4.0.400",
+    "@remotion/licensing": "4.0.402",
+    "remotion": "4.0.402",
     "mediabunny": "1.27.3"
   },
   "devDependencies": {
-    "@remotion/eslint-config-internal": "4.0.400",
-    "@remotion/player": "4.0.400",
-    "@remotion/media": "4.0.400",
+    "@remotion/eslint-config-internal": "4.0.402",
+    "@remotion/player": "4.0.402",
+    "@remotion/media": "4.0.402",
     "@typescript/native-preview": "7.0.0-dev.20260105.1",
     "@vitejs/plugin-react": "4.1.0",
     "@vitest/browser-playwright": "4.0.9",