@remotion/media 4.0.358 → 4.0.362

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -81,6 +81,9 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  setReplaceWithHtml5Audio(true);
  return;
  }
+ if (result.type === 'cannot-decode-alpha') {
+ throw new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToHtml5Audio' was set. But this should never happen, since you used the <Audio> tag. Please report this as a bug.`);
+ }
  if (result.type === 'network-error') {
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
@@ -0,0 +1 @@
+ export declare const canBrowserUseWebGl2: () => boolean;
@@ -0,0 +1,13 @@
+ let browserCanUseWebGl2 = null;
+ const browserCanUseWebGl2Uncached = () => {
+ const canvas = new OffscreenCanvas(1, 1);
+ const context = canvas.getContext('webgl2');
+ return context !== null;
+ };
+ export const canBrowserUseWebGl2 = () => {
+ if (browserCanUseWebGl2 !== null) {
+ return browserCanUseWebGl2;
+ }
+ browserCanUseWebGl2 = browserCanUseWebGl2Uncached();
+ return browserCanUseWebGl2;
+ };
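
A minimal usage sketch of the memoized WebGL2 check added above (illustrative only; the warning text and the decision to warn are not part of the package):

import {canBrowserUseWebGl2} from './browser-can-use-webgl2';

// The first call creates a 1x1 OffscreenCanvas and requests a 'webgl2' context;
// every later call returns the cached boolean without touching the canvas API again.
if (!canBrowserUseWebGl2()) {
  // The keyframe manager uses this check to skip alpha decoding when WebGL2 is missing.
  console.warn('WebGL2 unavailable; alpha video will take a fallback path');
}
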
package/dist/caches.d.ts CHANGED
@@ -7,7 +7,7 @@ export declare const keyframeManager: {
  videoSampleSink: import("mediabunny").VideoSampleSink;
  src: string;
  logLevel: LogLevel;
- }) => Promise<import("./video-extraction/keyframe-bank").KeyframeBank | null>;
+ }) => Promise<import("./video-extraction/keyframe-bank").KeyframeBank | "has-alpha" | null>;
  getCacheStats: () => Promise<{
  count: number;
  totalSize: number;
package/dist/caches.js CHANGED
@@ -28,9 +28,9 @@ const getUncachedMaxCacheSize = (logLevel) => {
  if (window.remotion_initialMemoryAvailable !== undefined &&
  window.remotion_initialMemoryAvailable !== null) {
  const value = window.remotion_initialMemoryAvailable / 2;
- if (value < 240 * 1024 * 1024) {
- Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
- return 240 * 1024 * 1024;
+ if (value < 500 * 1024 * 1024) {
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
+ return 500 * 1024 * 1024;
  }
  if (value > 20000 * 1024 * 1024) {
  Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
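
The minimum media cache size rises from 240MB to 500MB here; the surrounding logic clamps half of the reported available memory between that floor and a 20GB ceiling. A standalone sketch of the clamp (maxCacheSize and availableBytes are illustrative names; the real code reads window.remotion_initialMemoryAvailable):

const MIN_CACHE = 500 * 1024 * 1024;    // 500MB floor
const MAX_CACHE = 20000 * 1024 * 1024;  // 20GB ceiling

const maxCacheSize = (availableBytes: number): number => {
  const half = availableBytes / 2;
  if (half < MIN_CACHE) return MIN_CACHE; // more than half of system memory
  if (half > MAX_CACHE) return MAX_CACHE; // less than half of system memory
  return half;
};

// e.g. with 8GB reported available: half = 4GB, within bounds, so the cache may use 4GB.
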
@@ -226,11 +226,18 @@ function isNetworkError(error) {

  // src/video/timeout-utils.ts
  var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+ class TimeoutError extends Error {
+ constructor(message = "Operation timed out") {
+ super(message);
+ this.name = "TimeoutError";
+ }
+ }
  function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
  let timeoutId = null;
  const timeoutPromise = new Promise((_, reject) => {
  timeoutId = window.setTimeout(() => {
- reject(new Error(errorMessage));
+ reject(new TimeoutError(errorMessage));
  }, timeoutMs);
  });
  return Promise.race([
@@ -280,6 +287,7 @@ class MediaPlayer {
  audioBufferHealth = 0;
  audioIteratorStarted = false;
  HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
+ mediaEnded = false;
  onVideoFrameCallback;
  constructor({
  canvas,
@@ -361,7 +369,8 @@ class MediaPlayer {
  }
  this.canvasSink = new CanvasSink(videoTrack, {
  poolSize: 2,
- fit: "contain"
+ fit: "contain",
+ alpha: true
  });
  this.canvas.width = videoTrack.displayWidth;
  this.canvas.height = videoTrack.displayHeight;
@@ -439,6 +448,8 @@ class MediaPlayer {
  src: this.src
  });
  if (newTime === null) {
+ this.videoAsyncId++;
+ this.nextFrame = null;
  this.clearCanvas();
  await this.cleanAudioIteratorAndNodes();
  return;
@@ -448,6 +459,7 @@ class MediaPlayer {
  if (isSignificantSeek) {
  this.nextFrame = null;
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
+ this.mediaEnded = false;
  if (this.audioSink) {
  await this.cleanAudioIteratorAndNodes();
  }
@@ -582,6 +594,7 @@ class MediaPlayer {
  }
  drawCurrentFrame() {
  if (this.context && this.nextFrame) {
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
  this.context.drawImage(this.nextFrame.canvas, 0, 0);
  }
  if (this.onVideoFrameCallback && this.canvas) {
@@ -644,6 +657,7 @@ class MediaPlayer {
  while (true) {
  const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
  if (!newNextFrame) {
+ this.mediaEnded = true;
  break;
  }
  const playbackTime = this.getPlaybackTime();
@@ -714,12 +728,15 @@ class MediaPlayer {
  let result;
  try {
  result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
- } catch {
- this.setBufferingState(true);
+ } catch (error) {
+ if (error instanceof TimeoutError && !this.mediaEnded) {
+ this.setBufferingState(true);
+ }
  await sleep(10);
  continue;
  }
  if (result.done || !result.value) {
+ this.mediaEnded = true;
  break;
  }
  const { buffer, timestamp, duration } = result.value;
@@ -1527,6 +1544,21 @@ var makeAudioManager = () => {
  // src/video-extraction/keyframe-manager.ts
  import { Internals as Internals9 } from "remotion";

+ // src/browser-can-use-webgl2.ts
+ var browserCanUseWebGl2 = null;
+ var browserCanUseWebGl2Uncached = () => {
+ const canvas = new OffscreenCanvas(1, 1);
+ const context = canvas.getContext("webgl2");
+ return context !== null;
+ };
+ var canBrowserUseWebGl2 = () => {
+ if (browserCanUseWebGl2 !== null) {
+ return browserCanUseWebGl2;
+ }
+ browserCanUseWebGl2 = browserCanUseWebGl2Uncached();
+ return browserCanUseWebGl2;
+ };
+
  // src/render-timestamp-range.ts
  var renderTimestampRange = (timestamps) => {
  if (timestamps.length === 0) {
@@ -1927,6 +1959,10 @@ var makeKeyframeManager = () => {
  const startPacket = await packetSink.getKeyPacket(timestamp, {
  verifyKeyPackets: true
  });
+ const hasAlpha = startPacket?.sideData.alpha;
+ if (hasAlpha && !canBrowserUseWebGl2()) {
+ return "has-alpha";
+ }
  if (!startPacket) {
  return null;
  }
@@ -2038,9 +2074,9 @@ var getUncachedMaxCacheSize = (logLevel) => {
  }
  if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
  const value = window.remotion_initialMemoryAvailable / 2;
- if (value < 240 * 1024 * 1024) {
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
- return 240 * 1024 * 1024;
+ if (value < 500 * 1024 * 1024) {
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
+ return 500 * 1024 * 1024;
  }
  if (value > 20000 * 1024 * 1024) {
  Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
@@ -2302,6 +2338,12 @@ var extractFrameInternal = async ({
  src,
  logLevel
  });
+ if (keyframeBank === "has-alpha") {
+ return {
+ type: "cannot-decode-alpha",
+ durationInSeconds: await sink.getDuration()
+ };
+ }
  if (!keyframeBank) {
  return {
  type: "success",
@@ -2367,6 +2409,12 @@ var extractFrameAndAudio = async ({
  if (frame?.type === "unknown-container-format") {
  return { type: "unknown-container-format" };
  }
+ if (frame?.type === "cannot-decode-alpha") {
+ return {
+ type: "cannot-decode-alpha",
+ durationInSeconds: frame.durationInSeconds
+ };
+ }
  if (audio === "unknown-container-format") {
  if (frame !== null) {
  frame?.frame?.close();
@@ -2426,6 +2474,15 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);
  return;
  }
+ if (result.type === "cannot-decode-alpha") {
+ const cannotDecodeAlphaResponse = {
+ type: "response-cannot-decode-alpha",
+ id: data.id,
+ durationInSeconds: result.durationInSeconds
+ };
+ window.remotion_broadcastChannel.postMessage(cannotDecodeAlphaResponse);
+ return;
+ }
  if (result.type === "network-error") {
  const networkErrorResponse = {
  type: "response-network-error",
@@ -2544,6 +2601,14 @@ var extractFrameViaBroadcastChannel = ({
  window.remotion_broadcastChannel.removeEventListener("message", onMessage);
  return;
  }
+ if (data.type === "response-cannot-decode-alpha") {
+ resolve({
+ type: "cannot-decode-alpha",
+ durationInSeconds: data.durationInSeconds
+ });
+ window.remotion_broadcastChannel.removeEventListener("message", onMessage);
+ return;
+ }
  throw new Error(`Invalid message: ${JSON.stringify(data)}`);
  };
  window.remotion_broadcastChannel.addEventListener("message", onMessage);
@@ -2673,6 +2738,9 @@ var AudioForRendering = ({
  setReplaceWithHtml5Audio(true);
  return;
  }
+ if (result.type === "cannot-decode-alpha") {
+ throw new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToHtml5Audio' was set. But this should never happen, since you used the <Audio> tag. Please report this as a bug.`);
+ }
  if (result.type === "network-error") {
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
@@ -3236,6 +3304,18 @@ var VideoForRendering = ({
  });
  return;
  }
+ if (result.type === "cannot-decode-alpha") {
+ if (disallowFallbackToOffthreadVideo) {
+ cancelRender3(new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+ }
+ if (window.remotion_isMainTab) {
+ Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
+ }
+ setReplaceWithOffthreadVideo({
+ durationInSeconds: result.durationInSeconds
+ });
+ return;
+ }
  if (result.type === "network-error") {
  if (disallowFallbackToOffthreadVideo) {
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
@@ -3253,7 +3333,9 @@ var VideoForRendering = ({
  } = result;
  if (imageBitmap) {
  onVideoFrame?.(imageBitmap);
- const context = canvasRef.current?.getContext("2d");
+ const context = canvasRef.current?.getContext("2d", {
+ alpha: true
+ });
  if (!context) {
  return;
  }
@@ -3263,7 +3345,9 @@ var VideoForRendering = ({
  context.drawImage(imageBitmap, 0, 0);
  imageBitmap.close();
  } else if (window.remotion_videoEnabled) {
- const context = canvasRef.current?.getContext("2d");
+ const context = canvasRef.current?.getContext("2d", {
+ alpha: true
+ });
  if (context) {
  context.clearRect(0, 0, context.canvas.width, context.canvas.height);
  }
@@ -3344,7 +3428,7 @@ var VideoForRendering = ({
  delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
  style,
  allowAmplificationDuringRender: true,
- transparent: fallbackOffthreadVideoProps?.transparent ?? false,
+ transparent: fallbackOffthreadVideoProps?.transparent ?? true,
  toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true,
  audioStreamIndex: audioStreamIndex ?? 0,
  name,
@@ -40,6 +40,12 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
  if (frame?.type === 'unknown-container-format') {
  return { type: 'unknown-container-format' };
  }
+ if (frame?.type === 'cannot-decode-alpha') {
+ return {
+ type: 'cannot-decode-alpha',
+ durationInSeconds: frame.durationInSeconds,
+ };
+ }
  if (audio === 'unknown-container-format') {
  if (frame !== null) {
  frame?.frame?.close();
@@ -45,6 +45,7 @@ export declare class MediaPlayer {
  private audioBufferHealth;
  private audioIteratorStarted;
  private readonly HEALTHY_BUFER_THRESHOLD_SECONDS;
+ private mediaEnded;
  private onVideoFrameCallback?;
  constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }: {
  canvas: HTMLCanvasElement | null;
@@ -2,7 +2,7 @@ import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'med
  import { Internals } from 'remotion';
  import { getTimeInSeconds } from '../get-time-in-seconds';
  import { isNetworkError } from '../is-network-error';
- import { sleep, withTimeout } from './timeout-utils';
+ import { sleep, TimeoutError, withTimeout } from './timeout-utils';
  export const SEEK_THRESHOLD = 0.05;
  const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
  export class MediaPlayer {
@@ -30,6 +30,7 @@ export class MediaPlayer {
  this.audioBufferHealth = 0;
  this.audioIteratorStarted = false;
  this.HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
+ this.mediaEnded = false;
  this.input = null;
  this.render = () => {
  if (this.isBuffering) {
@@ -100,6 +101,7 @@ export class MediaPlayer {
  while (true) {
  const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
  if (!newNextFrame) {
+ this.mediaEnded = true;
  break;
  }
  const playbackTime = this.getPlaybackTime();
@@ -138,12 +140,16 @@ export class MediaPlayer {
  try {
  result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, 'Iterator timeout');
  }
- catch {
- this.setBufferingState(true);
+ catch (error) {
+ if (error instanceof TimeoutError && !this.mediaEnded) {
+ this.setBufferingState(true);
+ }
  await sleep(10);
  continue;
  }
+ // media has ended
  if (result.done || !result.value) {
+ this.mediaEnded = true;
  break;
  }
  const { buffer, timestamp, duration } = result.value;
@@ -258,6 +264,7 @@ export class MediaPlayer {
  this.canvasSink = new CanvasSink(videoTrack, {
  poolSize: 2,
  fit: 'contain',
+ alpha: true,
  });
  this.canvas.width = videoTrack.displayWidth;
  this.canvas.height = videoTrack.displayHeight;
@@ -336,6 +343,9 @@ export class MediaPlayer {
  src: this.src,
  });
  if (newTime === null) {
+ // invalidate in-flight video operations
+ this.videoAsyncId++;
+ this.nextFrame = null;
  this.clearCanvas();
  await this.cleanAudioIteratorAndNodes();
  return;
@@ -346,6 +356,7 @@ export class MediaPlayer {
  if (isSignificantSeek) {
  this.nextFrame = null;
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
+ this.mediaEnded = false;
  if (this.audioSink) {
  await this.cleanAudioIteratorAndNodes();
  }
@@ -473,6 +484,7 @@ export class MediaPlayer {
  }
  drawCurrentFrame() {
  if (this.context && this.nextFrame) {
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
  this.context.drawImage(this.nextFrame.canvas, 0, 0);
  }
  if (this.onVideoFrameCallback && this.canvas) {
@@ -1,2 +1,5 @@
  export declare const sleep: (ms: number) => Promise<unknown>;
+ export declare class TimeoutError extends Error {
+ constructor(message?: string);
+ }
  export declare function withTimeout<T>(promise: Promise<T>, timeoutMs: number, errorMessage?: string): Promise<T>;
@@ -1,10 +1,16 @@
  /* eslint-disable no-promise-executor-return */
  export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+ export class TimeoutError extends Error {
+ constructor(message = 'Operation timed out') {
+ super(message);
+ this.name = 'TimeoutError';
+ }
+ }
  export function withTimeout(promise, timeoutMs, errorMessage = 'Operation timed out') {
  let timeoutId = null;
  const timeoutPromise = new Promise((_, reject) => {
  timeoutId = window.setTimeout(() => {
- reject(new Error(errorMessage));
+ reject(new TimeoutError(errorMessage));
  }, timeoutMs);
  });
  return Promise.race([
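
A brief sketch of how a caller can now tell a timeout apart from other failures (the iterator, the 1000ms timeout, and the 10ms retry delay below are illustrative; only sleep, TimeoutError, and withTimeout come from timeout-utils):

import {sleep, TimeoutError, withTimeout} from './timeout-utils';

const pollNext = async <T>(iterator: AsyncIterator<T>): Promise<T | null> => {
  while (true) {
    try {
      const result = await withTimeout(iterator.next(), 1000, 'Iterator timeout');
      if (result.done) {
        return null; // iterator exhausted
      }
      return result.value;
    } catch (error) {
      if (error instanceof TimeoutError) {
        // Only a timeout triggers a retry; the MediaPlayer additionally skips
        // flagging "buffering" once the media has already ended.
        await sleep(10);
        continue;
      }
      throw error; // any other error is a real failure
    }
  }
};
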
@@ -88,6 +88,18 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  });
  return;
  }
+ if (result.type === 'cannot-decode-alpha') {
+ if (disallowFallbackToOffthreadVideo) {
+ cancelRender(new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+ }
+ if (window.remotion_isMainTab) {
+ Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
+ }
+ setReplaceWithOffthreadVideo({
+ durationInSeconds: result.durationInSeconds,
+ });
+ return;
+ }
  if (result.type === 'network-error') {
  if (disallowFallbackToOffthreadVideo) {
  cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
@@ -101,7 +113,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  const { frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, } = result;
  if (imageBitmap) {
  onVideoFrame?.(imageBitmap);
- const context = canvasRef.current?.getContext('2d');
+ const context = canvasRef.current?.getContext('2d', {
+ alpha: true,
+ });
  if (!context) {
  return;
  }
@@ -121,7 +135,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  // In the case of https://discord.com/channels/809501355504959528/809501355504959531/1424400511070765086
  // A video that only starts at time 0.033sec
  // we shall not crash here but clear the canvas
- const context = canvasRef.current?.getContext('2d');
+ const context = canvasRef.current?.getContext('2d', {
+ alpha: true,
+ });
  if (context) {
  context.clearRect(0, 0, context.canvas.width, context.canvas.height);
  }
@@ -195,7 +211,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  .join(' ');
  }, [className]);
  if (replaceWithOffthreadVideo) {
- const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? false, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+ const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? true, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
  // these shouldn't matter during rendering / should not appear at all
  showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: undefined, trimBefore: undefined, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
  if (loop) {
@@ -8,6 +8,9 @@ export type ExtractFrameViaBroadcastChannelResult = {
  } | {
  type: 'cannot-decode';
  durationInSeconds: number | null;
+ } | {
+ type: 'cannot-decode-alpha';
+ durationInSeconds: number | null;
  } | {
  type: 'network-error';
  } | {
@@ -28,6 +28,15 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);
  return;
  }
+ if (result.type === 'cannot-decode-alpha') {
+ const cannotDecodeAlphaResponse = {
+ type: 'response-cannot-decode-alpha',
+ id: data.id,
+ durationInSeconds: result.durationInSeconds,
+ };
+ window.remotion_broadcastChannel.postMessage(cannotDecodeAlphaResponse);
+ return;
+ }
  if (result.type === 'network-error') {
  const networkErrorResponse = {
  type: 'response-network-error',
@@ -138,6 +147,14 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  window.remotion_broadcastChannel.removeEventListener('message', onMessage);
  return;
  }
+ if (data.type === 'response-cannot-decode-alpha') {
+ resolve({
+ type: 'cannot-decode-alpha',
+ durationInSeconds: data.durationInSeconds,
+ });
+ window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+ return;
+ }
  throw new Error(`Invalid message: ${JSON.stringify(data)}`);
  };
  window.remotion_broadcastChannel.addEventListener('message', onMessage);
@@ -7,6 +7,9 @@ type ExtractFrameResult = {
  } | {
  type: 'cannot-decode';
  durationInSeconds: number | null;
+ } | {
+ type: 'cannot-decode-alpha';
+ durationInSeconds: number | null;
  } | {
  type: 'unknown-container-format';
  };
@@ -42,6 +42,12 @@ const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  src,
  logLevel,
  });
+ if (keyframeBank === 'has-alpha') {
+ return {
+ type: 'cannot-decode-alpha',
+ durationInSeconds: await sink.getDuration(),
+ };
+ }
  if (!keyframeBank) {
  return {
  type: 'success',
@@ -8,7 +8,7 @@ export declare const makeKeyframeManager: () => {
  videoSampleSink: VideoSampleSink;
  src: string;
  logLevel: LogLevel;
- }) => Promise<KeyframeBank | null>;
+ }) => Promise<KeyframeBank | "has-alpha" | null>;
  getCacheStats: () => Promise<{
  count: number;
  totalSize: number;
@@ -1,4 +1,5 @@
  import { Internals } from 'remotion';
+ import { canBrowserUseWebGl2 } from '../browser-can-use-webgl2';
  import { getMaxVideoCacheSize, getTotalCacheStats, SAFE_BACK_WINDOW_IN_SECONDS, } from '../caches';
  import { renderTimestampRange } from '../render-timestamp-range';
  import { getFramesSinceKeyframe } from './get-frames-since-keyframe';
@@ -104,6 +105,10 @@ export const makeKeyframeManager = () => {
  const startPacket = await packetSink.getKeyPacket(timestamp, {
  verifyKeyPackets: true,
  });
+ const hasAlpha = startPacket?.sideData.alpha;
+ if (hasAlpha && !canBrowserUseWebGl2()) {
+ return 'has-alpha';
+ }
  if (!startPacket) {
  // e.g. https://discord.com/channels/809501355504959528/809501355504959531/1424400511070765086
  // The video has an offset and the first frame is at time 0.033sec
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/media",
- "version": "4.0.358",
+ "version": "4.0.362",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -22,7 +22,7 @@
  },
  "dependencies": {
  "mediabunny": "1.23.0",
- "remotion": "4.0.357",
+ "remotion": "4.0.362",
  "webdriverio": "9.19.2"
  },
  "peerDependencies": {
@@ -30,7 +30,7 @@
  "react-dom": ">=16.8.0"
  },
  "devDependencies": {
- "@remotion/eslint-config-internal": "4.0.357",
+ "@remotion/eslint-config-internal": "4.0.362",
  "@vitest/browser": "^3.2.4",
  "eslint": "9.19.0",
  "react": "19.0.0",