@remotion/media 4.0.351 → 4.0.353

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dist/audio/props.d.ts +1 -2
  2. package/dist/audio-extraction/audio-cache.js +4 -0
  3. package/dist/audio-extraction/audio-iterator.d.ts +1 -1
  4. package/dist/audio-extraction/audio-iterator.js +7 -4
  5. package/dist/audio-extraction/audio-manager.d.ts +3 -2
  6. package/dist/audio-extraction/audio-manager.js +4 -3
  7. package/dist/audio-extraction/extract-audio.d.ts +1 -1
  8. package/dist/audio-extraction/extract-audio.js +1 -0
  9. package/dist/caches.d.ts +9 -7
  10. package/dist/caches.js +37 -1
  11. package/dist/convert-audiodata/resample-audiodata.js +3 -2
  12. package/dist/esm/index.mjs +114 -88
  13. package/dist/video/props.d.ts +1 -0
  14. package/dist/video/video-for-rendering.js +23 -9
  15. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +1 -1
  16. package/dist/video-extraction/extract-frame.d.ts +1 -1
  17. package/dist/video-extraction/get-frames-since-keyframe.js +1 -1
  18. package/dist/video-extraction/keyframe-bank.d.ts +1 -1
  19. package/dist/video-extraction/keyframe-bank.js +7 -7
  20. package/dist/video-extraction/keyframe-manager.d.ts +1 -1
  21. package/dist/video-extraction/keyframe-manager.js +9 -8
  22. package/package.json +3 -3
  23. package/dist/audio-for-rendering.d.ts +0 -3
  24. package/dist/audio-for-rendering.js +0 -94
  25. package/dist/audio.d.ts +0 -3
  26. package/dist/audio.js +0 -60
  27. package/dist/audiodata-to-array.d.ts +0 -0
  28. package/dist/audiodata-to-array.js +0 -1
  29. package/dist/convert-audiodata/data-types.d.ts +0 -1
  30. package/dist/convert-audiodata/data-types.js +0 -22
  31. package/dist/convert-audiodata/is-planar-format.d.ts +0 -1
  32. package/dist/convert-audiodata/is-planar-format.js +0 -3
  33. package/dist/convert-audiodata/log-audiodata.d.ts +0 -1
  34. package/dist/convert-audiodata/log-audiodata.js +0 -8
  35. package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
  36. package/dist/convert-audiodata/trim-audiodata.js +0 -1
  37. package/dist/deserialized-audiodata.d.ts +0 -15
  38. package/dist/deserialized-audiodata.js +0 -26
  39. package/dist/extract-audio.d.ts +0 -7
  40. package/dist/extract-audio.js +0 -98
  41. package/dist/extract-frame-via-broadcast-channel.d.ts +0 -15
  42. package/dist/extract-frame-via-broadcast-channel.js +0 -104
  43. package/dist/extract-frame.d.ts +0 -27
  44. package/dist/extract-frame.js +0 -21
  45. package/dist/extrct-audio.d.ts +0 -7
  46. package/dist/extrct-audio.js +0 -94
  47. package/dist/get-frames-since-keyframe.d.ts +0 -22
  48. package/dist/get-frames-since-keyframe.js +0 -41
  49. package/dist/keyframe-bank.d.ts +0 -25
  50. package/dist/keyframe-bank.js +0 -120
  51. package/dist/keyframe-manager.d.ts +0 -23
  52. package/dist/keyframe-manager.js +0 -170
  53. package/dist/new-video-for-rendering.d.ts +0 -3
  54. package/dist/new-video-for-rendering.js +0 -108
  55. package/dist/new-video.d.ts +0 -3
  56. package/dist/new-video.js +0 -37
  57. package/dist/props.d.ts +0 -29
  58. package/dist/props.js +0 -1
  59. package/dist/remember-actual-matroska-timestamps.d.ts +0 -4
  60. package/dist/remember-actual-matroska-timestamps.js +0 -19
  61. package/dist/serialize-videoframe.d.ts +0 -0
  62. package/dist/serialize-videoframe.js +0 -1
  63. package/dist/video-for-rendering.d.ts +0 -3
  64. package/dist/video-for-rendering.js +0 -108
  65. package/dist/video.d.ts +0 -3
  66. package/dist/video.js +0 -37
@@ -1,8 +1,8 @@
1
1
  // src/audio/audio.tsx
2
2
  import { useCallback as useCallback2, useContext as useContext5 } from "react";
3
3
  import {
4
- cancelRender as cancelRender2,
5
- Internals as Internals2,
4
+ cancelRender as cancelRender3,
5
+ Internals as Internals6,
6
6
  Sequence,
7
7
  useRemotionEnvironment as useRemotionEnvironment3
8
8
  } from "remotion";
@@ -44,46 +44,18 @@ var SharedAudioContext = createContext2(null);
44
44
  // src/audio/audio-for-rendering.tsx
45
45
  import { useContext as useContext4, useLayoutEffect, useMemo as useMemo3, useState as useState3 } from "react";
46
46
  import {
47
- cancelRender,
48
- Internals,
47
+ cancelRender as cancelRender2,
48
+ Internals as Internals5,
49
49
  useCurrentFrame,
50
50
  useDelayRender,
51
51
  useRemotionEnvironment as useRemotionEnvironment2
52
52
  } from "remotion";
53
53
 
54
- // src/log.ts
55
- var logLevels = ["trace", "verbose", "info", "warn", "error"];
56
- var getNumberForLogLevel = (level) => {
57
- return logLevels.indexOf(level);
58
- };
59
- var isEqualOrBelowLogLevel = (currentLevel, level) => {
60
- return getNumberForLogLevel(currentLevel) <= getNumberForLogLevel(level);
61
- };
62
- var Log2 = {
63
- trace: (logLevel, ...args) => {
64
- if (isEqualOrBelowLogLevel(logLevel, "trace")) {
65
- return console.log(...args);
66
- }
67
- },
68
- verbose: (logLevel, ...args) => {
69
- if (isEqualOrBelowLogLevel(logLevel, "verbose")) {
70
- return console.log(...args);
71
- }
72
- },
73
- info: (logLevel, ...args) => {
74
- if (isEqualOrBelowLogLevel(logLevel, "info")) {
75
- return console.log(...args);
76
- }
77
- },
78
- warn: (logLevel, ...args) => {
79
- if (isEqualOrBelowLogLevel(logLevel, "warn")) {
80
- return console.warn(...args);
81
- }
82
- },
83
- error: (...args) => {
84
- return console.error(...args);
85
- }
86
- };
54
+ // src/caches.ts
55
+ import { cancelRender, Internals as Internals4 } from "remotion";
56
+
57
+ // src/audio-extraction/audio-iterator.ts
58
+ import { Internals } from "remotion";
87
59
 
88
60
  // src/audio-extraction/audio-cache.ts
89
61
  var makeAudioCache = () => {
@@ -97,6 +69,10 @@ var makeAudioCache = () => {
97
69
  for (const timestamp of timestamps) {
98
70
  const endTimestamp = timestamp + samples[timestamp].duration;
99
71
  if (endTimestamp < threshold) {
72
+ const isLast = timestamp === timestamps[timestamps.length - 1];
73
+ if (isLast) {
74
+ continue;
75
+ }
100
76
  samples[timestamp].close();
101
77
  delete samples[timestamp];
102
78
  timestamps.splice(timestamps.indexOf(timestamp), 1);
@@ -182,8 +158,9 @@ var makeAudioIterator = ({
182
158
  }
183
159
  const samples = cache.getSamples(timestamp, durationInSeconds);
184
160
  while (true) {
185
- cache.clearBeforeThreshold(timestamp - SAFE_BACK_WINDOW_IN_SECONDS);
186
161
  const sample = await getNextSample();
162
+ const deleteBefore = fullDuration === null ? timestamp : Math.min(timestamp, fullDuration);
163
+ cache.clearBeforeThreshold(deleteBefore - SAFE_BACK_WINDOW_IN_SECONDS);
187
164
  if (sample === null) {
188
165
  break;
189
166
  }
@@ -198,7 +175,7 @@ var makeAudioIterator = ({
198
175
  return samples;
199
176
  };
200
177
  const logOpenFrames = (logLevel) => {
201
- Log2.verbose(logLevel, "[Audio] Open samples for src", src, cache.getOpenTimestamps().map((t) => t.toFixed(3)).join(", "));
178
+ Internals.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, cache.getOpenTimestamps().map((t) => t.toFixed(3)).join(", "));
202
179
  };
203
180
  const getCacheStats = () => {
204
181
  return {
@@ -285,9 +262,11 @@ var makeAudioManager = () => {
285
262
  timeInSeconds,
286
263
  audioSampleSink,
287
264
  isMatroska,
288
- actualMatroskaTimestamps
265
+ actualMatroskaTimestamps,
266
+ logLevel
289
267
  }) => {
290
- while ((await getTotalCacheStats()).totalSize > MAX_CACHE_SIZE) {
268
+ const maxCacheSize = getMaxVideoCacheSize(logLevel);
269
+ while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
291
270
  await deleteOldestIterator();
292
271
  }
293
272
  for (const iterator of iterators) {
@@ -333,6 +312,9 @@ var makeAudioManager = () => {
333
312
  };
334
313
  };
335
314
 
315
+ // src/video-extraction/keyframe-manager.ts
316
+ import { Internals as Internals3 } from "remotion";
317
+
336
318
  // ../../node_modules/.pnpm/mediabunny@1.17.0/node_modules/mediabunny/dist/modules/src/misc.js
337
319
  /*!
338
320
  * Copyright (c) 2025-present, Vanilagy and contributors
@@ -13270,6 +13252,7 @@ class Input {
13270
13252
  */
13271
13253
 
13272
13254
  // src/video-extraction/keyframe-bank.ts
13255
+ import { Internals as Internals2 } from "remotion";
13273
13256
  var roundTo4Digits = (timestamp) => {
13274
13257
  return Math.round(timestamp * 1000) / 1000;
13275
13258
  };
@@ -13281,7 +13264,7 @@ var makeKeyframeBank = ({
13281
13264
  const frames = {};
13282
13265
  const frameTimestamps = [];
13283
13266
  let lastUsed = Date.now();
13284
- let alloctionSize = 0;
13267
+ let allocationSize = 0;
13285
13268
  const hasDecodedEnoughForTimestamp = (timestamp) => {
13286
13269
  const lastFrameTimestamp = frameTimestamps[frameTimestamps.length - 1];
13287
13270
  if (!lastFrameTimestamp) {
@@ -13296,7 +13279,7 @@ var makeKeyframeBank = ({
13296
13279
  const addFrame = (frame) => {
13297
13280
  frames[frame.timestamp] = frame;
13298
13281
  frameTimestamps.push(frame.timestamp);
13299
- alloctionSize += frame.allocationSize();
13282
+ allocationSize += frame.allocationSize();
13300
13283
  lastUsed = Date.now();
13301
13284
  };
13302
13285
  const ensureEnoughFramesForTimestamp = async (timestamp) => {
@@ -13343,7 +13326,7 @@ var makeKeyframeBank = ({
13343
13326
  if (!frames[frameTimestamp]) {
13344
13327
  continue;
13345
13328
  }
13346
- alloctionSize -= frames[frameTimestamp].allocationSize();
13329
+ allocationSize -= frames[frameTimestamp].allocationSize();
13347
13330
  frames[frameTimestamp].close();
13348
13331
  delete frames[frameTimestamp];
13349
13332
  }
@@ -13363,17 +13346,17 @@ var makeKeyframeBank = ({
13363
13346
  if (!frames[frameTimestamp]) {
13364
13347
  continue;
13365
13348
  }
13366
- alloctionSize -= frames[frameTimestamp].allocationSize();
13349
+ allocationSize -= frames[frameTimestamp].allocationSize();
13367
13350
  frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
13368
13351
  frames[frameTimestamp].close();
13369
13352
  delete frames[frameTimestamp];
13370
- Log2.verbose(logLevel, `[Video] Deleted frame ${frameTimestamp} for src ${src}`);
13353
+ Internals2.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frame ${frameTimestamp} for src ${src}`);
13371
13354
  }
13372
13355
  }
13373
13356
  };
13374
13357
  const getOpenFrameCount = () => {
13375
13358
  return {
13376
- size: alloctionSize,
13359
+ size: allocationSize,
13377
13360
  timestamps: frameTimestamps
13378
13361
  };
13379
13362
  };
@@ -13447,7 +13430,7 @@ var getFramesSinceKeyframe = async ({
13447
13430
  startPacket
13448
13431
  }) => {
13449
13432
  const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
13450
- verifyKeyPackets: false
13433
+ verifyKeyPackets: true
13451
13434
  });
13452
13435
  const sampleIterator = videoSampleSink.samples(startPacket.timestamp, nextKeyPacket ? nextKeyPacket.timestamp : Infinity);
13453
13436
  const keyframeBank = makeKeyframeBank({
@@ -13481,10 +13464,10 @@ var makeKeyframeManager = () => {
13481
13464
  if (size === 0) {
13482
13465
  continue;
13483
13466
  }
13484
- Log2.verbose(logLevel, `[Video] Open frames for src ${src}: ${timestamps.join(", ")}`);
13467
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${timestamps.join(", ")}`);
13485
13468
  }
13486
13469
  }
13487
- Log2.verbose(logLevel, `[Video] Cache stats: ${count} open frames, ${totalSize} bytes`);
13470
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
13488
13471
  };
13489
13472
  const getCacheStats = async () => {
13490
13473
  let count = 0;
@@ -13525,12 +13508,13 @@ var makeKeyframeManager = () => {
13525
13508
  if (mostInThePastBank) {
13526
13509
  await mostInThePastBank.prepareForDeletion();
13527
13510
  delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
13528
- Log2.verbose(logLevel, `[Video] Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
13511
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
13529
13512
  }
13530
13513
  };
13531
13514
  const ensureToStayUnderMaxCacheSize = async (logLevel) => {
13532
13515
  let cacheStats = await getTotalCacheStats();
13533
- while (cacheStats.totalSize > MAX_CACHE_SIZE) {
13516
+ const maxCacheSize = getMaxVideoCacheSize(logLevel);
13517
+ while (cacheStats.totalSize > maxCacheSize) {
13534
13518
  await deleteOldestKeyframeBank(logLevel);
13535
13519
  cacheStats = await getTotalCacheStats();
13536
13520
  }
@@ -13550,7 +13534,7 @@ var makeKeyframeManager = () => {
13550
13534
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
13551
13535
  if (endTimestampInSeconds < threshold) {
13552
13536
  await bank.prepareForDeletion();
13553
- Log2.verbose(logLevel, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
13537
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
13554
13538
  delete sources[src][startTimeInSeconds];
13555
13539
  } else {
13556
13540
  bank.deleteFramesBeforeTimestamp({
@@ -13589,7 +13573,7 @@ var makeKeyframeManager = () => {
13589
13573
  if (await (await existingBank).hasTimestampInSecond(timestamp)) {
13590
13574
  return existingBank;
13591
13575
  }
13592
- Log2.verbose(logLevel, `[Video] Bank exists but frames have already been evicted!`);
13576
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frames have already been evicted!`);
13593
13577
  await (await existingBank).prepareForDeletion();
13594
13578
  delete sources[src][startTimestampInSeconds];
13595
13579
  const replacementKeybank = getFramesSinceKeyframe({
@@ -13642,7 +13626,6 @@ var makeKeyframeManager = () => {
13642
13626
  };
13643
13627
 
13644
13628
  // src/caches.ts
13645
- var MAX_CACHE_SIZE = 1000 * 1000 * 1000;
13646
13629
  var SAFE_BACK_WINDOW_IN_SECONDS = 1;
13647
13630
  var keyframeManager = makeKeyframeManager();
13648
13631
  var audioManager = makeAudioManager();
@@ -13654,6 +13637,40 @@ var getTotalCacheStats = async () => {
13654
13637
  totalSize: keyframeManagerCacheStats.totalSize + audioManagerCacheStats.totalSize
13655
13638
  };
13656
13639
  };
13640
+ var getUncachedMaxCacheSize = (logLevel) => {
13641
+ if (window.remotion_mediaCacheSizeInBytes !== undefined && window.remotion_mediaCacheSizeInBytes !== null) {
13642
+ if (window.remotion_mediaCacheSizeInBytes < 240 * 1024 * 1024) {
13643
+ cancelRender(new Error(`The minimum value for the "mediaCacheSizeInBytes" prop is 240MB (${240 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
13644
+ }
13645
+ if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
13646
+ cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
13647
+ }
13648
+ Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
13649
+ return window.remotion_mediaCacheSizeInBytes;
13650
+ }
13651
+ if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
13652
+ const value = window.remotion_initialMemoryAvailable / 2;
13653
+ if (value < 240 * 1024 * 1024) {
13654
+ Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
13655
+ return 240 * 1024 * 1024;
13656
+ }
13657
+ if (value > 20000 * 1024 * 1024) {
13658
+ Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
13659
+ return 20000 * 1024 * 1024;
13660
+ }
13661
+ Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
13662
+ return value;
13663
+ }
13664
+ return 1000 * 1000 * 1000;
13665
+ };
13666
+ var cachedMaxCacheSize = null;
13667
+ var getMaxVideoCacheSize = (logLevel) => {
13668
+ if (cachedMaxCacheSize !== null) {
13669
+ return cachedMaxCacheSize;
13670
+ }
13671
+ cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
13672
+ return cachedMaxCacheSize;
13673
+ };
13657
13674
 
13658
13675
  // src/convert-audiodata/combine-audiodata.ts
13659
13676
  var combineAudioDataAndClosePrevious = (audioDataArray) => {
@@ -13752,8 +13769,9 @@ var resampleAudioData = ({
13752
13769
  const c = getSourceValues(start, end, 2);
13753
13770
  const sl = getSourceValues(start, end, 3);
13754
13771
  const sr = getSourceValues(start, end, 4);
13755
- const l2 = l + Math.sqrt(1 / 2) * (c + sl);
13756
- const r2 = r + Math.sqrt(1 / 2) * (c + sr);
13772
+ const sq = Math.sqrt(1 / 2);
13773
+ const l2 = l + sq * (c + sl);
13774
+ const r2 = r + sq * (c + sr);
13757
13775
  destination[newFrameIndex * 2 + 0] = l2;
13758
13776
  destination[newFrameIndex * 2 + 1] = r2;
13759
13777
  } else {
@@ -13872,7 +13890,8 @@ var extractAudio = async ({
13872
13890
  timeInSeconds,
13873
13891
  audioSampleSink: audio.sampleSink,
13874
13892
  isMatroska,
13875
- actualMatroskaTimestamps
13893
+ actualMatroskaTimestamps,
13894
+ logLevel
13876
13895
  });
13877
13896
  const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
13878
13897
  audioManager.logOpenFrames(logLevel);
@@ -14074,11 +14093,11 @@ var AudioForRendering = ({
14074
14093
  logLevel = window.remotion_logLevel,
14075
14094
  loop
14076
14095
  }) => {
14077
- const absoluteFrame = Internals.useTimelinePosition();
14078
- const videoConfig = Internals.useUnsafeVideoConfig();
14079
- const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals.RenderAssetManager);
14096
+ const absoluteFrame = Internals5.useTimelinePosition();
14097
+ const videoConfig = Internals5.useUnsafeVideoConfig();
14098
+ const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals5.RenderAssetManager);
14080
14099
  const frame = useCurrentFrame();
14081
- const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
14100
+ const volumePropsFrame = Internals5.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
14082
14101
  const environment = useRemotionEnvironment2();
14083
14102
  const [id] = useState3(() => `${Math.random()}`.replace("0.", ""));
14084
14103
  if (!videoConfig) {
@@ -14087,12 +14106,12 @@ var AudioForRendering = ({
14087
14106
  if (!src) {
14088
14107
  throw new TypeError("No `src` was passed to <Audio>.");
14089
14108
  }
14090
- const volume = Internals.evaluateVolume({
14109
+ const volume = Internals5.evaluateVolume({
14091
14110
  volume: volumeProp,
14092
14111
  frame: volumePropsFrame,
14093
14112
  mediaVolume: 1
14094
14113
  });
14095
- Internals.warnAboutTooHighVolume(volume);
14114
+ Internals5.warnAboutTooHighVolume(volume);
14096
14115
  const shouldRenderAudio = useMemo3(() => {
14097
14116
  if (!window.remotion_audioEnabled) {
14098
14117
  return false;
@@ -14140,7 +14159,7 @@ var AudioForRendering = ({
14140
14159
  }
14141
14160
  continueRender(newHandle);
14142
14161
  }).catch((error) => {
14143
- cancelRender(error);
14162
+ cancelRender2(error);
14144
14163
  });
14145
14164
  return () => {
14146
14165
  continueRender(newHandle);
@@ -14175,7 +14194,7 @@ var {
14175
14194
  resolveTrimProps,
14176
14195
  validateMediaProps,
14177
14196
  AudioForPreview
14178
- } = Internals2;
14197
+ } = Internals6;
14179
14198
  var Audio = (props) => {
14180
14199
  const audioContext = useContext5(SharedAudioContext);
14181
14200
  const {
@@ -14216,7 +14235,7 @@ var Audio = (props) => {
14216
14235
  onRemotionError(new Error(errMessage));
14217
14236
  return;
14218
14237
  }
14219
- cancelRender2(new Error(errMessage));
14238
+ cancelRender3(new Error(errMessage));
14220
14239
  } else {
14221
14240
  onRemotionError?.(new Error(errMessage));
14222
14241
  console.warn(errMessage);
@@ -14262,7 +14281,7 @@ var Audio = (props) => {
14262
14281
  };
14263
14282
  // src/video/video.tsx
14264
14283
  import { useCallback as useCallback3 } from "react";
14265
- import { Internals as Internals4, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment5 } from "remotion";
14284
+ import { Internals as Internals8, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment5 } from "remotion";
14266
14285
 
14267
14286
  // src/video/video-for-rendering.tsx
14268
14287
  import {
@@ -14273,11 +14292,12 @@ import {
14273
14292
  useState as useState4
14274
14293
  } from "react";
14275
14294
  import {
14276
- cancelRender as cancelRender3,
14277
- Internals as Internals3,
14295
+ cancelRender as cancelRender4,
14296
+ Internals as Internals7,
14278
14297
  useCurrentFrame as useCurrentFrame2,
14279
14298
  useDelayRender as useDelayRender2,
14280
- useRemotionEnvironment as useRemotionEnvironment4
14299
+ useRemotionEnvironment as useRemotionEnvironment4,
14300
+ useVideoConfig
14281
14301
  } from "remotion";
14282
14302
  import { jsx as jsx3 } from "react/jsx-runtime";
14283
14303
  var VideoForRendering = ({
@@ -14291,28 +14311,26 @@ var VideoForRendering = ({
14291
14311
  onVideoFrame,
14292
14312
  logLevel = window.remotion_logLevel,
14293
14313
  loop,
14294
- style
14314
+ style,
14315
+ className
14295
14316
  }) => {
14296
- const absoluteFrame = Internals3.useTimelinePosition();
14297
- const videoConfig = Internals3.useUnsafeVideoConfig();
14317
+ const absoluteFrame = Internals7.useTimelinePosition();
14318
+ const { fps } = useVideoConfig();
14298
14319
  const canvasRef = useRef2(null);
14299
- const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals3.RenderAssetManager);
14320
+ const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals7.RenderAssetManager);
14300
14321
  const frame = useCurrentFrame2();
14301
- const volumePropsFrame = Internals3.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
14322
+ const volumePropsFrame = Internals7.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
14302
14323
  const environment = useRemotionEnvironment4();
14303
14324
  const [id] = useState4(() => `${Math.random()}`.replace("0.", ""));
14304
- if (!videoConfig) {
14305
- throw new Error("No video config found");
14306
- }
14307
14325
  if (!src) {
14308
14326
  throw new TypeError("No `src` was passed to <Video>.");
14309
14327
  }
14310
- const volume = Internals3.evaluateVolume({
14328
+ const volume = Internals7.evaluateVolume({
14311
14329
  volume: volumeProp,
14312
14330
  frame: volumePropsFrame,
14313
14331
  mediaVolume: 1
14314
14332
  });
14315
- Internals3.warnAboutTooHighVolume(volume);
14333
+ Internals7.warnAboutTooHighVolume(volume);
14316
14334
  const shouldRenderAudio = useMemo4(() => {
14317
14335
  if (!window.remotion_audioEnabled) {
14318
14336
  return false;
@@ -14325,7 +14343,6 @@ var VideoForRendering = ({
14325
14343
  }
14326
14344
  return true;
14327
14345
  }, [muted, volume]);
14328
- const { fps } = videoConfig;
14329
14346
  const { delayRender, continueRender } = useDelayRender2();
14330
14347
  useLayoutEffect2(() => {
14331
14348
  if (!canvasRef.current) {
@@ -14351,10 +14368,17 @@ var VideoForRendering = ({
14351
14368
  }).then(({ frame: imageBitmap, audio }) => {
14352
14369
  if (imageBitmap) {
14353
14370
  onVideoFrame?.(imageBitmap);
14354
- canvasRef.current?.getContext("2d")?.drawImage(imageBitmap, 0, 0);
14371
+ const context = canvasRef.current?.getContext("2d");
14372
+ if (!context) {
14373
+ return;
14374
+ }
14375
+ context.canvas.width = imageBitmap instanceof ImageBitmap ? imageBitmap.width : imageBitmap.displayWidth;
14376
+ context.canvas.height = imageBitmap instanceof ImageBitmap ? imageBitmap.height : imageBitmap.displayHeight;
14377
+ context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
14378
+ context.drawImage(imageBitmap, 0, 0);
14355
14379
  imageBitmap.close();
14356
14380
  } else if (window.remotion_videoEnabled) {
14357
- cancelRender3(new Error("No video frame found"));
14381
+ cancelRender4(new Error("No video frame found"));
14358
14382
  }
14359
14383
  if (audio) {
14360
14384
  registerRenderAsset({
@@ -14370,7 +14394,7 @@ var VideoForRendering = ({
14370
14394
  }
14371
14395
  continueRender(newHandle);
14372
14396
  }).catch((error) => {
14373
- cancelRender3(error);
14397
+ cancelRender4(error);
14374
14398
  });
14375
14399
  return () => {
14376
14400
  continueRender(newHandle);
@@ -14396,11 +14420,13 @@ var VideoForRendering = ({
14396
14420
  volume,
14397
14421
  loop
14398
14422
  ]);
14423
+ const classNameValue = useMemo4(() => {
14424
+ return [Internals7.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals7.truthy).join(" ");
14425
+ }, [className]);
14399
14426
  return /* @__PURE__ */ jsx3("canvas", {
14400
14427
  ref: canvasRef,
14401
14428
  style,
14402
- width: videoConfig.width,
14403
- height: videoConfig.height
14429
+ className: classNameValue
14404
14430
  });
14405
14431
  };
14406
14432
 
@@ -14411,7 +14437,7 @@ var {
14411
14437
  resolveTrimProps: resolveTrimProps2,
14412
14438
  validateMediaProps: validateMediaProps2,
14413
14439
  VideoForPreview
14414
- } = Internals4;
14440
+ } = Internals8;
14415
14441
  var Video = (props) => {
14416
14442
  const {
14417
14443
  trimBefore,
@@ -1,6 +1,7 @@
1
1
  import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
2
2
  export type VideoProps = {
3
3
  src: string;
4
+ className?: string;
4
5
  trimBefore?: number;
5
6
  trimAfter?: number;
6
7
  volume?: VolumeProp;
@@ -1,21 +1,18 @@
1
1
  import { jsx as _jsx } from "react/jsx-runtime";
2
2
  import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
3
- import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
3
+ import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
4
4
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
5
5
  export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds,
6
6
  // call when a frame of the video, i.e. frame drawn on canvas
7
- onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
7
+ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) => {
8
8
  const absoluteFrame = Internals.useTimelinePosition();
9
- const videoConfig = Internals.useUnsafeVideoConfig();
9
+ const { fps } = useVideoConfig();
10
10
  const canvasRef = useRef(null);
11
11
  const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
12
12
  const frame = useCurrentFrame();
13
13
  const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
14
14
  const environment = useRemotionEnvironment();
15
15
  const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
16
- if (!videoConfig) {
17
- throw new Error('No video config found');
18
- }
19
16
  if (!src) {
20
17
  throw new TypeError('No `src` was passed to <Video>.');
21
18
  }
@@ -37,7 +34,6 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
37
34
  }
38
35
  return true;
39
36
  }, [muted, volume]);
40
- const { fps } = videoConfig;
41
37
  const { delayRender, continueRender } = useDelayRender();
42
38
  useLayoutEffect(() => {
43
39
  if (!canvasRef.current) {
@@ -64,7 +60,20 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
64
60
  .then(({ frame: imageBitmap, audio }) => {
65
61
  if (imageBitmap) {
66
62
  onVideoFrame?.(imageBitmap);
67
- canvasRef.current?.getContext('2d')?.drawImage(imageBitmap, 0, 0);
63
+ const context = canvasRef.current?.getContext('2d');
64
+ if (!context) {
65
+ return;
66
+ }
67
+ context.canvas.width =
68
+ imageBitmap instanceof ImageBitmap
69
+ ? imageBitmap.width
70
+ : imageBitmap.displayWidth;
71
+ context.canvas.height =
72
+ imageBitmap instanceof ImageBitmap
73
+ ? imageBitmap.height
74
+ : imageBitmap.displayHeight;
75
+ context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
76
+ context.drawImage(imageBitmap, 0, 0);
68
77
  imageBitmap.close();
69
78
  }
70
79
  else if (window.remotion_videoEnabled) {
@@ -111,5 +120,10 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
111
120
  volume,
112
121
  loop,
113
122
  ]);
114
- return (_jsx("canvas", { ref: canvasRef, style: style, width: videoConfig.width, height: videoConfig.height }));
123
+ const classNameValue = useMemo(() => {
124
+ return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
125
+ .filter(Internals.truthy)
126
+ .join(' ');
127
+ }, [className]);
128
+ return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
115
129
  };
@@ -1,5 +1,5 @@
1
+ import { type LogLevel } from 'remotion';
1
2
  import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
2
- import type { LogLevel } from '../log';
3
3
  export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering, volume, loop, }: {
4
4
  src: string;
5
5
  timeInSeconds: number;
@@ -1,4 +1,4 @@
1
- import type { LogLevel } from '../log';
1
+ import { type LogLevel } from 'remotion';
2
2
  import { type GetSink } from './get-frames-since-keyframe';
3
3
  export declare const sinkPromises: Record<string, Promise<GetSink>>;
4
4
  export declare const extractFrame: ({ src, timeInSeconds: unloopedTimeinSeconds, logLevel, loop, }: {
@@ -30,7 +30,7 @@ export const getSinks = async (src) => {
30
30
  };
31
31
  export const getFramesSinceKeyframe = async ({ packetSink, videoSampleSink, startPacket, }) => {
32
32
  const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
33
- verifyKeyPackets: false,
33
+ verifyKeyPackets: true,
34
34
  });
35
35
  const sampleIterator = videoSampleSink.samples(startPacket.timestamp, nextKeyPacket ? nextKeyPacket.timestamp : Infinity);
36
36
  const keyframeBank = makeKeyframeBank({
@@ -1,5 +1,5 @@
1
1
  import type { VideoSample } from 'mediabunny';
2
- import type { LogLevel } from '../log';
2
+ import { type LogLevel } from 'remotion';
3
3
  export type KeyframeBank = {
4
4
  startTimestampInSeconds: number;
5
5
  endTimestampInSeconds: number;
@@ -1,4 +1,4 @@
1
- import { Log } from '../log';
1
+ import { Internals } from 'remotion';
2
2
  // Round to only 4 digits, because WebM has a timescale of 1_000, e.g. framer.webm
3
3
  const roundTo4Digits = (timestamp) => {
4
4
  return Math.round(timestamp * 1000) / 1000;
@@ -7,7 +7,7 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
7
7
  const frames = {};
8
8
  const frameTimestamps = [];
9
9
  let lastUsed = Date.now();
10
- let alloctionSize = 0;
10
+ let allocationSize = 0;
11
11
  const hasDecodedEnoughForTimestamp = (timestamp) => {
12
12
  const lastFrameTimestamp = frameTimestamps[frameTimestamps.length - 1];
13
13
  if (!lastFrameTimestamp) {
@@ -24,7 +24,7 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
24
24
  const addFrame = (frame) => {
25
25
  frames[frame.timestamp] = frame;
26
26
  frameTimestamps.push(frame.timestamp);
27
- alloctionSize += frame.allocationSize();
27
+ allocationSize += frame.allocationSize();
28
28
  lastUsed = Date.now();
29
29
  };
30
30
  const ensureEnoughFramesForTimestamp = async (timestamp) => {
@@ -72,7 +72,7 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
72
72
  if (!frames[frameTimestamp]) {
73
73
  continue;
74
74
  }
75
- alloctionSize -= frames[frameTimestamp].allocationSize();
75
+ allocationSize -= frames[frameTimestamp].allocationSize();
76
76
  frames[frameTimestamp].close();
77
77
  delete frames[frameTimestamp];
78
78
  }
@@ -89,17 +89,17 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
89
89
  if (!frames[frameTimestamp]) {
90
90
  continue;
91
91
  }
92
- alloctionSize -= frames[frameTimestamp].allocationSize();
92
+ allocationSize -= frames[frameTimestamp].allocationSize();
93
93
  frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
94
94
  frames[frameTimestamp].close();
95
95
  delete frames[frameTimestamp];
96
- Log.verbose(logLevel, `[Video] Deleted frame ${frameTimestamp} for src ${src}`);
96
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Deleted frame ${frameTimestamp} for src ${src}`);
97
97
  }
98
98
  }
99
99
  };
100
100
  const getOpenFrameCount = () => {
101
101
  return {
102
- size: alloctionSize,
102
+ size: allocationSize,
103
103
  timestamps: frameTimestamps,
104
104
  };
105
105
  };
@@ -1,5 +1,5 @@
1
1
  import type { EncodedPacketSink, VideoSampleSink } from 'mediabunny';
2
- import type { LogLevel } from '../log';
2
+ import { type LogLevel } from 'remotion';
3
3
  import { type KeyframeBank } from './keyframe-bank';
4
4
  export declare const makeKeyframeManager: () => {
5
5
  requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, }: {