@remotion/media 4.0.372 → 4.0.374

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,7 +13,7 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
  const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
  const [playing] = Timeline.usePlayingState();
- const timelineContext = useContext(Timeline.TimelineContext);
+ const timelineContext = useContext(Internals.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext(SharedAudioContext);
  const buffer = useBufferState();
@@ -44,6 +44,7 @@ const extractAudioInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;
  const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
  audioManager.logOpenFrames();
+ const trimStartToleranceInSeconds = 0.002;
  const audioDataArray = [];
  for (let i = 0; i < samples.length; i++) {
  const sample = samples[i];
@@ -65,7 +66,8 @@ const extractAudioInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  let trimEndInSeconds = 0;
  if (isFirstSample) {
  trimStartInSeconds = timeInSeconds - sample.timestamp;
- if (trimStartInSeconds < 0 && trimStartInSeconds > -1e-10) {
+ if (trimStartInSeconds < 0 &&
+ trimStartInSeconds > -trimStartToleranceInSeconds) {
  trimStartInSeconds = 0;
  }
  if (trimStartInSeconds < 0) {
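
The change above widens the tolerance for a slightly negative trim start from 1e-10 to trimStartToleranceInSeconds (2ms), so a first audio sample whose timestamp lands just after the requested time is no longer treated as an error. A minimal standalone sketch of that rule in TypeScript (hypothetical helper, not the shipped code; what happens below the tolerance is handled elsewhere):

    // Sketch of the widened trim-start tolerance (assumed helper names).
    const trimStartToleranceInSeconds = 0.002; // was effectively 1e-10 before

    const clampTrimStart = (timeInSeconds: number, sampleTimestamp: number): number => {
      const trimStartInSeconds = timeInSeconds - sampleTimestamp;
      // A slightly negative value only means the first sample begins a hair
      // after the requested time; treat it as "no trim" instead of an error.
      if (trimStartInSeconds < 0 && trimStartInSeconds > -trimStartToleranceInSeconds) {
        return 0;
      }
      return trimStartInSeconds;
    };

    // Requesting audio at 1.000s when the first sample starts at 1.0015s
    // now yields 0 instead of a tiny negative trim value.
    clampTrimStart(1.0, 1.0015); // 0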
@@ -994,11 +994,6 @@ class MediaPlayer {
  if (currentPlaybackTime === newTime) {
  return;
  }
- const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime / (this.playbackRate * this.globalPlaybackRate);
- const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
- if (diff > 0.04) {
- this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
- }
  await this.videoIteratorManager?.seek({
  newTime,
  nonce
@@ -1362,7 +1357,7 @@ var AudioForPreviewAssertedShowing = ({
  const [mediaPlayerReady, setMediaPlayerReady] = useState2(false);
  const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState2(false);
  const [playing] = Timeline.usePlayingState();
- const timelineContext = useContext2(Timeline.TimelineContext);
+ const timelineContext = useContext2(Internals6.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext2(SharedAudioContext);
  const buffer = useBufferState();
@@ -2270,19 +2265,26 @@ var makeKeyframeBank = ({
  };
  const getFrameFromTimestamp = async (timestampInSeconds) => {
  lastUsed = Date.now();
- if (timestampInSeconds < startTimestampInSeconds) {
- return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds})`));
+ const maxClampToleranceInSeconds = 0.1;
+ let adjustedTimestamp = timestampInSeconds;
+ if (roundTo4Digits(timestampInSeconds) < roundTo4Digits(startTimestampInSeconds)) {
+ const differenceInSeconds = startTimestampInSeconds - timestampInSeconds;
+ if (differenceInSeconds <= maxClampToleranceInSeconds) {
+ adjustedTimestamp = startTimestampInSeconds;
+ } else {
+ return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds}sec, difference: ${differenceInSeconds.toFixed(3)}sec exceeds tolerance of ${maxClampToleranceInSeconds}sec)`));
+ }
  }
- if (timestampInSeconds > endTimestampInSeconds) {
- return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds})`));
+ if (roundTo4Digits(adjustedTimestamp) > roundTo4Digits(endTimestampInSeconds)) {
+ return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds}sec)`));
  }
- await ensureEnoughFramesForTimestamp(timestampInSeconds);
+ await ensureEnoughFramesForTimestamp(adjustedTimestamp);
  for (let i = frameTimestamps.length - 1;i >= 0; i--) {
  const sample = frames[frameTimestamps[i]];
  if (!sample) {
  return null;
  }
- if (roundTo4Digits(sample.timestamp) <= roundTo4Digits(timestampInSeconds) || Math.abs(sample.timestamp - timestampInSeconds) <= 0.001) {
+ if (roundTo4Digits(sample.timestamp) <= roundTo4Digits(adjustedTimestamp) || Math.abs(sample.timestamp - adjustedTimestamp) <= 0.001) {
  return sample;
  }
  }
@@ -2588,7 +2590,7 @@ var makeKeyframeManager = () => {
  }) => {
  const startPacket = await packetSink.getKeyPacket(timestamp, {
  verifyKeyPackets: true
- });
+ }) ?? await packetSink.getFirstPacket({ verifyKeyPackets: true });
  const hasAlpha = startPacket?.sideData.alpha;
  if (hasAlpha && !canBrowserUseWebGl2()) {
  return "has-alpha";
@@ -2890,6 +2892,7 @@ var extractAudioInternal = async ({
  const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;
  const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
  audioManager.logOpenFrames();
+ const trimStartToleranceInSeconds = 0.002;
  const audioDataArray = [];
  for (let i = 0;i < samples.length; i++) {
  const sample = samples[i];
@@ -2906,7 +2909,7 @@ var extractAudioInternal = async ({
  let trimEndInSeconds = 0;
  if (isFirstSample) {
  trimStartInSeconds = timeInSeconds - sample.timestamp;
- if (trimStartInSeconds < 0 && trimStartInSeconds > -0.0000000001) {
+ if (trimStartInSeconds < 0 && trimStartInSeconds > -trimStartToleranceInSeconds) {
  trimStartInSeconds = 0;
  }
  if (trimStartInSeconds < 0) {
@@ -3583,7 +3586,7 @@ var VideoForPreviewAssertedShowing = ({
  const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);
  const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState4(false);
  const [playing] = Timeline2.usePlayingState();
- const timelineContext = useContext4(Timeline2.TimelineContext);
+ const timelineContext = useContext4(Internals15.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext4(SharedAudioContext2);
  const buffer = useBufferState2();
@@ -228,12 +228,6 @@ export class MediaPlayer {
  if (currentPlaybackTime === newTime) {
  return;
  }
- const newAudioSyncAnchor = this.sharedAudioContext.currentTime -
- newTime / (this.playbackRate * this.globalPlaybackRate);
- const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
- if (diff > 0.04) {
- this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
- }
  await this.videoIteratorManager?.seek({
  newTime,
  nonce,
@@ -15,7 +15,7 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
  const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
  const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState(false);
  const [playing] = Timeline.usePlayingState();
- const timelineContext = useContext(Timeline.TimelineContext);
+ const timelineContext = useContext(Internals.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext(SharedAudioContext);
  const buffer = useBufferState();
@@ -68,23 +68,35 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  };
  const getFrameFromTimestamp = async (timestampInSeconds) => {
  lastUsed = Date.now();
- if (timestampInSeconds < startTimestampInSeconds) {
- return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds})`));
+ // Videos may start slightly after timestamp 0 due to encoding, but if the requested timestamp is too far before the bank start, something is likely wrong.
+ const maxClampToleranceInSeconds = 0.1;
+ // If the requested timestamp is before the start of this bank, clamp it to the start if within tolerance. This handles videos that don't start at timestamp 0.
+ // For example, requesting frame at 0sec when video starts at 0.04sec should return the frame at 0.04sec.
+ // Test case: https://github.com/remotion-dev/remotion/issues/5915
+ let adjustedTimestamp = timestampInSeconds;
+ if (roundTo4Digits(timestampInSeconds) <
+ roundTo4Digits(startTimestampInSeconds)) {
+ const differenceInSeconds = startTimestampInSeconds - timestampInSeconds;
+ if (differenceInSeconds <= maxClampToleranceInSeconds) {
+ adjustedTimestamp = startTimestampInSeconds;
+ }
+ else {
+ return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds}sec, difference: ${differenceInSeconds.toFixed(3)}sec exceeds tolerance of ${maxClampToleranceInSeconds}sec)`));
+ }
  }
- if (timestampInSeconds > endTimestampInSeconds) {
- return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds})`));
+ if (roundTo4Digits(adjustedTimestamp) > roundTo4Digits(endTimestampInSeconds)) {
+ return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds}sec)`));
  }
- await ensureEnoughFramesForTimestamp(timestampInSeconds);
+ await ensureEnoughFramesForTimestamp(adjustedTimestamp);
  for (let i = frameTimestamps.length - 1; i >= 0; i--) {
  const sample = frames[frameTimestamps[i]];
  if (!sample) {
  return null;
  }
- if (roundTo4Digits(sample.timestamp) <=
- roundTo4Digits(timestampInSeconds) ||
+ if (roundTo4Digits(sample.timestamp) <= roundTo4Digits(adjustedTimestamp) ||
  // Match 0.3333333333 to 0.33355555
  // this does not satisfy the previous condition, since one rounds up and one rounds down
- Math.abs(sample.timestamp - timestampInSeconds) <= 0.001) {
+ Math.abs(sample.timestamp - adjustedTimestamp) <= 0.001) {
  return sample;
  }
  }
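
In effect, a request that falls slightly before the bank's first frame is now clamped to the start instead of rejected. The same rule in isolation, as a hedged TypeScript sketch (hypothetical helper name, tolerance value taken from the diff):

    // Sketch: clamp a timestamp to the bank start when it is at most
    // maxClampToleranceInSeconds before it.
    const maxClampToleranceInSeconds = 0.1;

    const clampToBankStart = (requestedSeconds: number, bankStartSeconds: number): number => {
      if (requestedSeconds >= bankStartSeconds) {
        return requestedSeconds;
      }
      const differenceInSeconds = bankStartSeconds - requestedSeconds;
      if (differenceInSeconds <= maxClampToleranceInSeconds) {
        // e.g. a video whose first frame sits at 0.04s, requested at 0s
        return bankStartSeconds;
      }
      throw new Error(
        `Timestamp is before start timestamp (difference: ${differenceInSeconds.toFixed(3)}sec)`,
      );
    };

    clampToBankStart(0, 0.04); // 0.04 - clamped to the first available frame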
@@ -111,9 +111,12 @@ export const makeKeyframeManager = () => {
  await logCacheStats(logLevel);
  };
  const getKeyframeBankOrRefetch = async ({ packetSink, timestamp, videoSampleSink, src, logLevel, }) => {
- const startPacket = await packetSink.getKeyPacket(timestamp, {
+ // Try to get the keypacket at the requested timestamp.
+ // If it returns null (timestamp is before the first keypacket), fall back to the first packet.
+ // This matches mediabunny's internal behavior and handles videos that don't start at timestamp 0.
+ const startPacket = (await packetSink.getKeyPacket(timestamp, {
  verifyKeyPackets: true,
- });
+ })) ?? (await packetSink.getFirstPacket({ verifyKeyPackets: true }));
  const hasAlpha = startPacket?.sideData.alpha;
  if (hasAlpha && !canBrowserUseWebGl2()) {
  return 'has-alpha';
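
The fallback covers the case where getKeyPacket() resolves to null because the requested timestamp precedes every key packet. Reduced to just that pattern, with the packet sink shape assumed from the calls made in the diff:

    // Sketch: prefer the key packet at the requested timestamp, but fall back
    // to the very first packet when the timestamp lies before all key packets
    // (e.g. a video that does not start at timestamp 0).
    type PacketSinkLike = {
      getKeyPacket: (timestamp: number, opts: {verifyKeyPackets: boolean}) => Promise<unknown | null>;
      getFirstPacket: (opts: {verifyKeyPackets: boolean}) => Promise<unknown | null>;
    };

    const resolveStartPacket = async (packetSink: PacketSinkLike, timestamp: number) => {
      return (
        (await packetSink.getKeyPacket(timestamp, {verifyKeyPackets: true})) ??
        (await packetSink.getFirstPacket({verifyKeyPackets: true}))
      );
    };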
@@ -0,0 +1,4 @@
+ export declare const rotateFrame: ({ frame, rotation, }: {
+ frame: VideoFrame;
+ rotation: number;
+ }) => Promise<ImageBitmap>;
@@ -0,0 +1,34 @@
+ export const rotateFrame = async ({ frame, rotation, }) => {
+ if (rotation === 0) {
+ const directBitmap = await createImageBitmap(frame);
+ frame.close();
+ return directBitmap;
+ }
+ const width = rotation === 90 || rotation === 270
+ ? frame.displayHeight
+ : frame.displayWidth;
+ const height = rotation === 90 || rotation === 270
+ ? frame.displayWidth
+ : frame.displayHeight;
+ const canvas = new OffscreenCanvas(width, height);
+ const ctx = canvas.getContext('2d');
+ if (!ctx) {
+ throw new Error('Could not get 2d context');
+ }
+ canvas.width = width;
+ canvas.height = height;
+ if (rotation === 90) {
+ ctx.translate(width, 0);
+ }
+ else if (rotation === 180) {
+ ctx.translate(width, height);
+ }
+ else if (rotation === 270) {
+ ctx.translate(0, height);
+ }
+ ctx.rotate(rotation * (Math.PI / 180));
+ ctx.drawImage(frame, 0, 0);
+ const bitmap = await createImageBitmap(canvas);
+ frame.close();
+ return bitmap;
+ };
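
A possible call site for the new helper, assuming a decoded WebCodecs VideoFrame and a container rotation of 90° (illustrative only; the actual call sites live elsewhere in the package, and rotateFrame closes the frame it is given):

    // Sketch: draw a rotated frame upright onto a 2D canvas via rotateFrame().
    declare const frame: VideoFrame;

    const drawUpright = async (ctx: CanvasRenderingContext2D) => {
      const bitmap = await rotateFrame({frame, rotation: 90});
      // The bitmap's width/height are already swapped for 90°/270° rotations.
      ctx.drawImage(bitmap, 0, 0);
      bitmap.close();
    };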
@@ -0,0 +1,14 @@
+ import type { VideoSample } from 'mediabunny';
+ /**
+  * Once we convert a VideoSample to a VideoFrame, we lose the rotation
+  * https://github.com/Vanilagy/mediabunny/pull/212
+  * This will be fixed in Mediabunny v2, but for now, we need to manually fix it.
+  *
+  * I'm actually wondering if your PR is actually a breaking change
+  I would say it kinda is actually
+  Because, previously only the VideoSample had rotation but the video frame you got from .toVideoFrame() was unrotated. Now, the resulting VideoFrame will be rotated, so drawing it to a canvas will behave differently. To me, this is a breaking change
+  People's old code that manually handled the rotation will break here
+  So I think this is actually a PR for v2
+  And for Remotion, you can do a temporary workaround fix by cloning the VideoFrame and overriding rotation that way, then closing the old frame, then transferring the cloned frame
+  */
+ export declare const toVideoFrameFixedRotation: (videoSample: VideoSample) => VideoFrame;
@@ -0,0 +1,41 @@
+ /**
+  * Once we convert a VideoSample to a VideoFrame, we lose the rotation
+  * https://github.com/Vanilagy/mediabunny/pull/212
+  * This will be fixed in Mediabunny v2, but for now, we need to manually fix it.
+  *
+  * I'm actually wondering if your PR is actually a breaking change
+  I would say it kinda is actually
+  Because, previously only the VideoSample had rotation but the video frame you got from .toVideoFrame() was unrotated. Now, the resulting VideoFrame will be rotated, so drawing it to a canvas will behave differently. To me, this is a breaking change
+  People's old code that manually handled the rotation will break here
+  So I think this is actually a PR for v2
+  And for Remotion, you can do a temporary workaround fix by cloning the VideoFrame and overriding rotation that way, then closing the old frame, then transferring the cloned frame
+  */
+ export const toVideoFrameFixedRotation = (videoSample) => {
+ const frame = videoSample.toVideoFrame();
+ if (videoSample.rotation === 0) {
+ return frame;
+ }
+ const canvas = new OffscreenCanvas(width, height);
+ const ctx = canvas.getContext('2d');
+ if (!ctx) {
+ throw new Error('Could not get 2d context');
+ }
+ canvas.width = width;
+ canvas.height = height;
+ if (canvasRotationToApply === 90) {
+ ctx.translate(width, 0);
+ }
+ else if (canvasRotationToApply === 180) {
+ ctx.translate(width, height);
+ }
+ else if (canvasRotationToApply === 270) {
+ ctx.translate(0, height);
+ }
+ console.log('sample rotation', videoSample.rotation);
+ // @ts-expect-error - rotation is not a known property of VideoFrameInit
+ const fixedFrame = new VideoFrame(frame, { rotation: videoSample.rotation });
+ frame.close();
+ // @ts-expect-error - rotation is not a known property of VideoFrameInit
+ console.log('fixed frame rotation', fixedFrame.rotation);
+ return fixedFrame;
+ };
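
The JSDoc above describes the workaround as cloning the VideoFrame with an overridden rotation and then closing the original. Reduced to just that step as a hedged TypeScript sketch (the `rotation` member of VideoFrameInit and browser support for it are assumed, as in the shipped code; the hypothetical helper below omits the canvas setup that appears in the published file):

    // Sketch: re-wrap a VideoFrame so the container rotation is carried over.
    const withRotation = (frame: VideoFrame, rotation: number): VideoFrame => {
      if (rotation === 0) {
        return frame;
      }
      // `rotation` is not yet part of VideoFrameInit in the DOM typings.
      const fixed = new VideoFrame(frame, {rotation} as unknown as VideoFrameInit);
      frame.close();
      return fixed;
    };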
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/media",
- "version": "4.0.372",
+ "version": "4.0.374",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -22,20 +22,20 @@
  },
  "dependencies": {
  "mediabunny": "1.24.3",
- "remotion": "4.0.372",
- "webdriverio": "9.19.2"
+ "remotion": "4.0.374"
  },
  "peerDependencies": {
  "react": ">=16.8.0",
  "react-dom": ">=16.8.0"
  },
  "devDependencies": {
- "@remotion/eslint-config-internal": "4.0.372",
- "@vitest/browser": "^3.2.4",
+ "@remotion/eslint-config-internal": "4.0.374",
+ "@vitest/browser-webdriverio": "4.0.7",
  "eslint": "9.19.0",
  "react": "19.0.0",
  "react-dom": "19.0.0",
- "vitest": "3.2.4"
+ "vitest": "4.0.7",
+ "webdriverio": "9.19.2"
  },
  "keywords": [],
  "publishConfig": {