remotion 4.0.300 → 4.0.302

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/bundle.ts +2 -0
  2. package/dist/cjs/audio/AudioForPreview.js +13 -14
  3. package/dist/cjs/audio/shared-audio-tags.d.ts +4 -1
  4. package/dist/cjs/audio/shared-audio-tags.js +24 -5
  5. package/dist/cjs/audio/shared-element-source-node.d.ts +8 -0
  6. package/dist/cjs/audio/shared-element-source-node.js +21 -0
  7. package/dist/cjs/audio/use-audio-context.d.ts +2 -0
  8. package/dist/cjs/audio/use-audio-context.js +36 -0
  9. package/dist/cjs/buffer-until-first-frame.js +1 -1
  10. package/dist/cjs/internals.d.ts +2 -0
  11. package/dist/cjs/media-tag-current-time-timestamp.d.ts +5 -0
  12. package/dist/cjs/media-tag-current-time-timestamp.js +23 -0
  13. package/dist/cjs/use-amplification.d.ts +7 -4
  14. package/dist/cjs/use-amplification.js +47 -32
  15. package/dist/cjs/use-media-buffering.js +6 -2
  16. package/dist/cjs/use-media-playback.d.ts +1 -2
  17. package/dist/cjs/use-media-playback.js +27 -15
  18. package/dist/cjs/use-request-video-callback-time.d.ts +4 -1
  19. package/dist/cjs/use-request-video-callback-time.js +10 -4
  20. package/dist/cjs/version.d.ts +1 -1
  21. package/dist/cjs/version.js +1 -1
  22. package/dist/cjs/video/VideoForPreview.js +23 -13
  23. package/dist/cjs/video/video-fragment.d.ts +1 -0
  24. package/dist/cjs/video/video-fragment.js +14 -3
  25. package/dist/esm/index.mjs +895 -805
  26. package/dist/esm/version.mjs +1 -1
  27. package/package.json +2 -2
  28. package/dist/cjs/propagate.d.ts +0 -1
  29. package/dist/cjs/propagate.js +0 -6
  30. package/dist/cjs/use-sync-volume-with-media-tag.d.ts +0 -9
  31. package/dist/cjs/use-sync-volume-with-media-tag.js +0 -38
  32. package/dist/cjs/video/calculate-endat.d.ts +0 -5
  33. package/dist/cjs/video/calculate-endat.js +0 -14
package/bundle.ts CHANGED
@@ -42,3 +42,5 @@ const [noReactFile] = noReactOutput.outputs;
42
42
  await Bun.write('dist/esm/no-react.mjs', await noReactFile.text());
43
43
 
44
44
  export {};
45
+
46
+ console.log('Done.');
@@ -11,7 +11,6 @@ const random_js_1 = require("../random.js");
11
11
  const use_amplification_js_1 = require("../use-amplification.js");
12
12
  const use_media_in_timeline_js_1 = require("../use-media-in-timeline.js");
13
13
  const use_media_playback_js_1 = require("../use-media-playback.js");
14
- const use_sync_volume_with_media_tag_js_1 = require("../use-sync-volume-with-media-tag.js");
15
14
  const volume_position_state_js_1 = require("../volume-position-state.js");
16
15
  const volume_prop_js_1 = require("../volume-prop.js");
17
16
  const shared_audio_tags_js_1 = require("./shared-audio-tags.js");
@@ -66,13 +65,11 @@ const AudioForDevelopmentForwardRefFunction = (props, ref) => {
66
65
  props.muted,
67
66
  props.loop,
68
67
  ]);
69
- const audioRef = (0, shared_audio_tags_js_1.useSharedAudio)(propsToPass, id).el;
70
- (0, use_sync_volume_with_media_tag_js_1.useSyncVolumeWithMediaTag)({
71
- volumePropFrame,
72
- volume,
73
- mediaVolume,
74
- mediaRef: audioRef,
75
- });
68
+ const context = (0, react_1.useContext)(shared_audio_tags_js_1.SharedAudioContext);
69
+ if (!context) {
70
+ throw new Error('SharedAudioContext not found');
71
+ }
72
+ const { el: audioRef, mediaElementSourceNode } = (0, shared_audio_tags_js_1.useSharedAudio)(propsToPass, id, context.audioContext);
76
73
  (0, use_media_in_timeline_js_1.useMediaInTimeline)({
77
74
  volume,
78
75
  mediaVolume,
@@ -88,11 +85,8 @@ const AudioForDevelopmentForwardRefFunction = (props, ref) => {
88
85
  onAutoPlayError: null,
89
86
  isPremounting: Boolean(sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.premounting),
90
87
  });
91
- (0, use_amplification_js_1.useAmplification)({
92
- logLevel,
93
- mediaRef: audioRef,
94
- volume: userPreferredVolume,
95
- });
88
+ // putting playback before useVolume
89
+ // because volume looks at playbackrate
96
90
  (0, use_media_playback_js_1.useMediaPlayback)({
97
91
  mediaRef: audioRef,
98
92
  src,
@@ -103,7 +97,12 @@ const AudioForDevelopmentForwardRefFunction = (props, ref) => {
103
97
  isPremounting: Boolean(sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.premounting),
104
98
  pauseWhenBuffering,
105
99
  onAutoPlayError: null,
106
- userPreferredVolume,
100
+ });
101
+ (0, use_amplification_js_1.useVolume)({
102
+ logLevel,
103
+ mediaRef: audioRef,
104
+ source: mediaElementSourceNode,
105
+ volume: userPreferredVolume,
107
106
  });
108
107
  (0, react_1.useImperativeHandle)(ref, () => {
109
108
  return audioRef.current;
@@ -1,6 +1,7 @@
1
1
  import type { ComponentType, LazyExoticComponent } from 'react';
2
2
  import React from 'react';
3
3
  import type { RemotionAudioProps } from './props.js';
4
+ import type { SharedElementSourceNode } from './shared-element-source-node.js';
4
5
  /**
5
6
  * This functionality of Remotion will keep a certain amount
6
7
  * of <audio> tags pre-mounted and by default filled with an empty audio track.
@@ -15,6 +16,7 @@ import type { RemotionAudioProps } from './props.js';
15
16
  type AudioElem = {
16
17
  id: number;
17
18
  props: RemotionAudioProps;
19
+ mediaElementSourceNode: SharedElementSourceNode | null;
18
20
  el: React.RefObject<HTMLAudioElement | null>;
19
21
  audioId: string;
20
22
  };
@@ -28,6 +30,7 @@ type SharedContext = {
28
30
  }) => void;
29
31
  playAllAudios: () => void;
30
32
  numberOfAudioTags: number;
33
+ audioContext: AudioContext | null;
31
34
  };
32
35
  export declare const SharedAudioContext: React.Context<SharedContext | null>;
33
36
  export declare const SharedAudioContextProvider: React.FC<{
@@ -35,5 +38,5 @@ export declare const SharedAudioContextProvider: React.FC<{
35
38
  readonly children: React.ReactNode;
36
39
  readonly component: LazyExoticComponent<ComponentType<Record<string, unknown>>> | null;
37
40
  }>;
38
- export declare const useSharedAudio: (aud: RemotionAudioProps, audioId: string) => AudioElem;
41
+ export declare const useSharedAudio: (aud: RemotionAudioProps, audioId: string, audioContext: AudioContext | null) => AudioElem;
39
42
  export {};
@@ -38,6 +38,8 @@ const jsx_runtime_1 = require("react/jsx-runtime");
38
38
  const react_1 = __importStar(require("react"));
39
39
  const log_level_context_js_1 = require("../log-level-context.js");
40
40
  const play_and_handle_not_allowed_error_js_1 = require("../play-and-handle-not-allowed-error.js");
41
+ const shared_element_source_node_js_1 = require("./shared-element-source-node.js");
42
+ const use_audio_context_js_1 = require("./use-audio-context.js");
41
43
  const EMPTY_AUDIO = 'data:audio/mp3;base64,/+MYxAAJcAV8AAgAABn//////+/gQ5BAMA+D4Pg+BAQBAEAwD4Pg+D4EBAEAQDAPg++hYBH///hUFQVBUFREDQNHmf///////+MYxBUGkAGIMAAAAP/29Xt6lUxBTUUzLjEwMFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV/+MYxDUAAANIAAAAAFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV';
42
44
  const compareProps = (obj1, obj2) => {
43
45
  const keysA = Object.keys(obj1).sort();
@@ -83,6 +85,8 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
83
85
  });
84
86
  }, [numberOfAudioTags]);
85
87
  const takenAudios = (0, react_1.useRef)(new Array(numberOfAudioTags).fill(false));
88
+ const logLevel = (0, log_level_context_js_1.useLogLevel)();
89
+ const audioContext = (0, use_audio_context_js_1.useSingletonAudioContext)(logLevel);
86
90
  const rerenderAudios = (0, react_1.useCallback)(() => {
87
91
  refs.forEach(({ ref, id }) => {
88
92
  var _a;
@@ -125,6 +129,12 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
125
129
  takenAudios.current = cloned;
126
130
  const newElem = {
127
131
  props: aud,
132
+ mediaElementSourceNode: audioContext
133
+ ? (0, shared_element_source_node_js_1.makeSharedElementSourceNode)({
134
+ audioContext,
135
+ ref,
136
+ })
137
+ : null,
128
138
  id,
129
139
  el: ref,
130
140
  audioId,
@@ -132,7 +142,7 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
132
142
  (_b = audios.current) === null || _b === void 0 ? void 0 : _b.push(newElem);
133
143
  rerenderAudios();
134
144
  return newElem;
135
- }, [numberOfAudioTags, refs, rerenderAudios]);
145
+ }, [numberOfAudioTags, refs, rerenderAudios, audioContext]);
136
146
  const unregisterAudio = (0, react_1.useCallback)((id) => {
137
147
  var _a;
138
148
  const cloned = [...takenAudios.current];
@@ -167,7 +177,6 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
167
177
  rerenderAudios();
168
178
  }
169
179
  }, [rerenderAudios]);
170
- const logLevel = (0, log_level_context_js_1.useLogLevel)();
171
180
  const mountTime = (0, log_level_context_js_1.useMountTime)();
172
181
  const playAllAudios = (0, react_1.useCallback)(() => {
173
182
  refs.forEach((ref) => {
@@ -180,7 +189,8 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
180
189
  reason: 'playing all audios',
181
190
  });
182
191
  });
183
- }, [logLevel, mountTime, refs]);
192
+ audioContext === null || audioContext === void 0 ? void 0 : audioContext.resume();
193
+ }, [audioContext, logLevel, mountTime, refs]);
184
194
  const value = (0, react_1.useMemo)(() => {
185
195
  return {
186
196
  registerAudio,
@@ -188,6 +198,7 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
188
198
  updateAudio,
189
199
  playAllAudios,
190
200
  numberOfAudioTags,
201
+ audioContext,
191
202
  };
192
203
  }, [
193
204
  numberOfAudioTags,
@@ -195,6 +206,7 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
195
206
  registerAudio,
196
207
  unregisterAudio,
197
208
  updateAudio,
209
+ audioContext,
198
210
  ]);
199
211
  // Fixing a bug: In React, if a component is unmounted using useInsertionEffect, then
200
212
  // the cleanup function does sometimes not work properly. That is why when we
@@ -221,7 +233,7 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, component })
221
233
  }), children] }));
222
234
  };
223
235
  exports.SharedAudioContextProvider = SharedAudioContextProvider;
224
- const useSharedAudio = (aud, audioId) => {
236
+ const useSharedAudio = (aud, audioId, audioContext) => {
225
237
  var _a;
226
238
  const ctx = (0, react_1.useContext)(exports.SharedAudioContext);
227
239
  /**
@@ -231,11 +243,18 @@ const useSharedAudio = (aud, audioId) => {
231
243
  if (ctx && ctx.numberOfAudioTags > 0) {
232
244
  return ctx.registerAudio(aud, audioId);
233
245
  }
246
+ const el = react_1.default.createRef();
234
247
  return {
235
- el: react_1.default.createRef(),
248
+ el,
236
249
  id: Math.random(),
237
250
  props: aud,
238
251
  audioId,
252
+ mediaElementSourceNode: audioContext
253
+ ? (0, shared_element_source_node_js_1.makeSharedElementSourceNode)({
254
+ audioContext,
255
+ ref: el,
256
+ })
257
+ : null,
239
258
  };
240
259
  });
241
260
  /**
@@ -0,0 +1,8 @@
1
+ export declare const makeSharedElementSourceNode: ({ audioContext, ref, }: {
2
+ audioContext: AudioContext;
3
+ ref: React.RefObject<HTMLAudioElement | null>;
4
+ }) => {
5
+ attemptToConnect: () => void;
6
+ get: () => MediaElementAudioSourceNode;
7
+ };
8
+ export type SharedElementSourceNode = ReturnType<typeof makeSharedElementSourceNode>;
@@ -0,0 +1,21 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.makeSharedElementSourceNode = void 0;
4
+ const makeSharedElementSourceNode = ({ audioContext, ref, }) => {
5
+ let connected = null;
6
+ return {
7
+ attemptToConnect: () => {
8
+ if (!connected && ref.current) {
9
+ const mediaElementSourceNode = audioContext.createMediaElementSource(ref.current);
10
+ connected = mediaElementSourceNode;
11
+ }
12
+ },
13
+ get: () => {
14
+ if (!connected) {
15
+ throw new Error('Audio element not connected');
16
+ }
17
+ return connected;
18
+ },
19
+ };
20
+ };
21
+ exports.makeSharedElementSourceNode = makeSharedElementSourceNode;
@@ -0,0 +1,2 @@
1
+ import type { LogLevel } from '../log';
2
+ export declare const useSingletonAudioContext: (logLevel: LogLevel) => AudioContext | null;
@@ -0,0 +1,36 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.useSingletonAudioContext = void 0;
4
+ const react_1 = require("react");
5
+ const log_1 = require("../log");
6
+ let warned = false;
7
+ const warnOnce = (logLevel) => {
8
+ if (warned) {
9
+ return;
10
+ }
11
+ warned = true;
12
+ // Don't pullute logs if in SSR
13
+ if (typeof window !== 'undefined') {
14
+ log_1.Log.warn(logLevel, 'AudioContext is not supported in this browser');
15
+ }
16
+ };
17
+ const useSingletonAudioContext = (logLevel) => {
18
+ const audioContext = (0, react_1.useMemo)(() => {
19
+ if (typeof AudioContext === 'undefined') {
20
+ warnOnce(logLevel);
21
+ return null;
22
+ }
23
+ return new AudioContext({
24
+ latencyHint: 'interactive',
25
+ });
26
+ }, [logLevel]);
27
+ (0, react_1.useEffect)(() => {
28
+ return () => {
29
+ if (audioContext) {
30
+ audioContext.close();
31
+ }
32
+ };
33
+ }, [audioContext]);
34
+ return audioContext;
35
+ };
36
+ exports.useSingletonAudioContext = useSingletonAudioContext;
@@ -28,7 +28,7 @@ const useBufferUntilFirstFrame = ({ mediaRef, mediaType, onVariableFpsVideoDetec
28
28
  if (!current) {
29
29
  return;
30
30
  }
31
- if (current.readyState >= current.HAVE_ENOUGH_DATA &&
31
+ if (current.readyState >= current.HAVE_FUTURE_DATA &&
32
32
  !isSafariWebkit() &&
33
33
  // In Desktop Chrome, the video might switch to playing
34
34
  // but does not play due to Bluetooth headphones
@@ -51,6 +51,7 @@ export declare const Internals: {
51
51
  registerAudio: (aud: import("./index.js").RemotionAudioProps, audioId: string) => {
52
52
  id: number;
53
53
  props: import("./index.js").RemotionAudioProps;
54
+ mediaElementSourceNode: import("./audio/shared-element-source-node.js").SharedElementSourceNode | null;
54
55
  el: React.RefObject<HTMLAudioElement | null>;
55
56
  audioId: string;
56
57
  };
@@ -62,6 +63,7 @@ export declare const Internals: {
62
63
  }) => void;
63
64
  playAllAudios: () => void;
64
65
  numberOfAudioTags: number;
66
+ audioContext: AudioContext | null;
65
67
  } | null>;
66
68
  readonly SharedAudioContextProvider: import("react").FC<{
67
69
  readonly numberOfAudioTags: number;
@@ -0,0 +1,5 @@
1
+ import React from 'react';
2
+ export declare const useCurrentTimeOfMediaTagWithUpdateTimeStamp: (mediaRef: React.RefObject<HTMLVideoElement | HTMLAudioElement | null>) => React.RefObject<{
3
+ time: number;
4
+ lastUpdate: number;
5
+ }>;
@@ -0,0 +1,23 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.useCurrentTimeOfMediaTagWithUpdateTimeStamp = void 0;
7
+ const react_1 = __importDefault(require("react"));
8
+ const useCurrentTimeOfMediaTagWithUpdateTimeStamp = (mediaRef) => {
9
+ var _a, _b, _c, _d;
10
+ const lastUpdate = react_1.default.useRef({
11
+ time: (_b = (_a = mediaRef.current) === null || _a === void 0 ? void 0 : _a.currentTime) !== null && _b !== void 0 ? _b : 0,
12
+ lastUpdate: performance.now(),
13
+ });
14
+ const nowCurrentTime = (_d = (_c = mediaRef.current) === null || _c === void 0 ? void 0 : _c.currentTime) !== null && _d !== void 0 ? _d : null;
15
+ if (nowCurrentTime !== null) {
16
+ if (lastUpdate.current.time !== nowCurrentTime) {
17
+ lastUpdate.current.time = nowCurrentTime;
18
+ lastUpdate.current.lastUpdate = performance.now();
19
+ }
20
+ }
21
+ return lastUpdate;
22
+ };
23
+ exports.useCurrentTimeOfMediaTagWithUpdateTimeStamp = useCurrentTimeOfMediaTagWithUpdateTimeStamp;
@@ -1,13 +1,16 @@
1
1
  import { type RefObject } from 'react';
2
+ import type { SharedElementSourceNode } from './audio/shared-element-source-node';
2
3
  import type { LogLevel } from './log';
3
4
  type AudioItems = {
4
5
  gainNode: GainNode;
5
- source: MediaElementAudioSourceNode;
6
- audioContext: AudioContext;
7
6
  };
8
- export declare const getShouldAmplify: (volume: number) => boolean;
9
- export declare const useAmplification: ({ mediaRef, volume, logLevel, }: {
7
+ /**
8
+ * [1] Bug case: In Safari, you cannot combine playbackRate and volume !== 1.
9
+ * If that is the case, volume will not be applied.
10
+ */
11
+ export declare const useVolume: ({ mediaRef, volume, logLevel, source, }: {
10
12
  mediaRef: RefObject<HTMLAudioElement | HTMLVideoElement | null>;
13
+ source: SharedElementSourceNode | null;
11
14
  volume: number;
12
15
  logLevel: LogLevel;
13
16
  }) => RefObject<AudioItems | null>;
@@ -1,66 +1,81 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.useAmplification = exports.getShouldAmplify = void 0;
3
+ exports.useVolume = void 0;
4
4
  const react_1 = require("react");
5
+ const shared_audio_tags_1 = require("./audio/shared-audio-tags");
6
+ const is_approximately_the_same_1 = require("./is-approximately-the-same");
5
7
  const log_1 = require("./log");
8
+ const video_fragment_1 = require("./video/video-fragment");
6
9
  let warned = false;
7
- const warnOnce = (logLevel) => {
10
+ const warnSafariOnce = (logLevel) => {
8
11
  if (warned) {
9
12
  return;
10
13
  }
11
14
  warned = true;
12
- log_1.Log.warn(logLevel, 'AudioContext is not supported in this browser');
15
+ log_1.Log.warn(logLevel, 'In Safari, setting a volume and a playback rate at the same time is buggy.');
16
+ log_1.Log.warn(logLevel, 'In Desktop Safari, only volumes <= 1 will be applied.');
17
+ log_1.Log.warn(logLevel, 'In Mobile Safari, the volume will be ignored and set to 1 if a playbackRate is set.');
13
18
  };
14
- const getShouldAmplify = (volume) => {
15
- return volume > 1;
16
- };
17
- exports.getShouldAmplify = getShouldAmplify;
18
- const useAmplification = ({ mediaRef, volume, logLevel, }) => {
19
- var _a;
20
- const shouldAmplify = (0, exports.getShouldAmplify)(volume);
19
+ /**
20
+ * [1] Bug case: In Safari, you cannot combine playbackRate and volume !== 1.
21
+ * If that is the case, volume will not be applied.
22
+ */
23
+ const useVolume = ({ mediaRef, volume, logLevel, source, }) => {
24
+ var _a, _b, _c;
21
25
  const audioStuffRef = (0, react_1.useRef)(null);
22
26
  const currentVolumeRef = (0, react_1.useRef)(volume);
23
27
  currentVolumeRef.current = volume;
28
+ const sharedAudioContext = (0, react_1.useContext)(shared_audio_tags_1.SharedAudioContext);
29
+ if (!sharedAudioContext) {
30
+ throw new Error('useAmplification must be used within a SharedAudioContext');
31
+ }
32
+ const { audioContext } = sharedAudioContext;
24
33
  (0, react_1.useLayoutEffect)(() => {
25
- var _a;
26
- if (!shouldAmplify) {
34
+ var _a, _b;
35
+ if (!audioContext) {
27
36
  return;
28
37
  }
29
- if (!AudioContext) {
30
- warnOnce(logLevel);
38
+ if (!mediaRef.current) {
31
39
  return;
32
40
  }
33
- if (!mediaRef.current) {
41
+ // [1]
42
+ if (mediaRef.current.playbackRate !== 1 && (0, video_fragment_1.isSafari)()) {
43
+ warnSafariOnce(logLevel);
34
44
  return;
35
45
  }
36
- if (audioStuffRef.current) {
46
+ if (!source) {
37
47
  return;
38
48
  }
39
- const audioContext = new AudioContext({
40
- latencyHint: 'interactive',
41
- });
42
- const source = new MediaElementAudioSourceNode(audioContext, {
43
- mediaElement: mediaRef.current,
44
- });
45
49
  const gainNode = new GainNode(audioContext, {
46
- gain: Math.max(currentVolumeRef.current, 1),
50
+ gain: currentVolumeRef.current,
47
51
  });
52
+ source.attemptToConnect();
53
+ source.get().connect(gainNode);
54
+ gainNode.connect(audioContext.destination);
48
55
  audioStuffRef.current = {
49
56
  gainNode,
50
- source,
51
- audioContext,
52
57
  };
53
- source.connect(gainNode);
54
- gainNode.connect(audioContext.destination);
55
- log_1.Log.trace(logLevel, `Starting to amplify ${(_a = mediaRef.current) === null || _a === void 0 ? void 0 : _a.src}. Gain = ${currentVolumeRef.current}`);
56
- }, [logLevel, mediaRef, shouldAmplify]);
58
+ log_1.Log.trace(logLevel, `Starting to amplify ${(_a = mediaRef.current) === null || _a === void 0 ? void 0 : _a.src}. Gain = ${currentVolumeRef.current}, playbackRate = ${(_b = mediaRef.current) === null || _b === void 0 ? void 0 : _b.playbackRate}`);
59
+ return () => {
60
+ audioStuffRef.current = null;
61
+ gainNode.disconnect();
62
+ source.get().disconnect();
63
+ };
64
+ }, [logLevel, mediaRef, audioContext, source]);
57
65
  if (audioStuffRef.current) {
58
- const valueToSet = Math.max(volume, 1);
59
- if (audioStuffRef.current.gainNode.gain.value !== valueToSet) {
66
+ const valueToSet = volume;
67
+ if (!(0, is_approximately_the_same_1.isApproximatelyTheSame)(audioStuffRef.current.gainNode.gain.value, valueToSet)) {
60
68
  audioStuffRef.current.gainNode.gain.value = valueToSet;
61
69
  log_1.Log.trace(logLevel, `Setting gain to ${valueToSet} for ${(_a = mediaRef.current) === null || _a === void 0 ? void 0 : _a.src}`);
62
70
  }
63
71
  }
72
+ // [1]
73
+ if (mediaRef.current &&
74
+ (0, video_fragment_1.isSafari)() &&
75
+ ((_b = mediaRef.current) === null || _b === void 0 ? void 0 : _b.playbackRate) !== 1 &&
76
+ !(0, is_approximately_the_same_1.isApproximatelyTheSame)(volume, (_c = mediaRef.current) === null || _c === void 0 ? void 0 : _c.volume)) {
77
+ mediaRef.current.volume = Math.min(volume, 1);
78
+ }
64
79
  return audioStuffRef;
65
80
  };
66
- exports.useAmplification = useAmplification;
81
+ exports.useVolume = useVolume;
@@ -29,11 +29,13 @@ const useMediaBuffering = ({ element, shouldBuffer, isPremounting, logLevel, mou
29
29
  if (!navigator.userAgent.includes('Firefox/')) {
30
30
  (0, playback_logging_1.playbackLogging)({
31
31
  logLevel,
32
- message: `Calling .load() on ${current.src} because readyState is ${current.readyState} and it is not Firefox. Element is premounted`,
32
+ message: `Calling .load() on ${current.src} because readyState is ${current.readyState} and it is not Firefox. Element is premounted ${current.playbackRate}`,
33
33
  tag: 'load',
34
34
  mountTime,
35
35
  });
36
+ const previousPlaybackRate = current.playbackRate;
36
37
  current.load();
38
+ current.playbackRate = previousPlaybackRate;
37
39
  }
38
40
  }
39
41
  return;
@@ -114,11 +116,13 @@ const useMediaBuffering = ({ element, shouldBuffer, isPremounting, logLevel, mou
114
116
  if (!navigator.userAgent.includes('Firefox/')) {
115
117
  (0, playback_logging_1.playbackLogging)({
116
118
  logLevel,
117
- message: `Calling .load() on ${src} because readyState is ${current.readyState} and it is not Firefox.`,
119
+ message: `Calling .load() on ${src} because readyState is ${current.readyState} and it is not Firefox. ${current.playbackRate}`,
118
120
  tag: 'load',
119
121
  mountTime,
120
122
  });
123
+ const previousPlaybackRate = current.playbackRate;
121
124
  current.load();
125
+ current.playbackRate = previousPlaybackRate;
122
126
  }
123
127
  }
124
128
  else {
@@ -1,5 +1,5 @@
1
1
  import type { RefObject } from 'react';
2
- export declare const useMediaPlayback: ({ mediaRef, src, mediaType, playbackRate: localPlaybackRate, onlyWarnForMediaSeekingError, acceptableTimeshift, pauseWhenBuffering, isPremounting, onAutoPlayError, userPreferredVolume, }: {
2
+ export declare const useMediaPlayback: ({ mediaRef, src, mediaType, playbackRate: localPlaybackRate, onlyWarnForMediaSeekingError, acceptableTimeshift, pauseWhenBuffering, isPremounting, onAutoPlayError, }: {
3
3
  mediaRef: RefObject<HTMLVideoElement | HTMLAudioElement | null>;
4
4
  src: string | undefined;
5
5
  mediaType: "audio" | "video";
@@ -9,5 +9,4 @@ export declare const useMediaPlayback: ({ mediaRef, src, mediaType, playbackRate
9
9
  pauseWhenBuffering: boolean;
10
10
  isPremounting: boolean;
11
11
  onAutoPlayError: null | (() => void);
12
- userPreferredVolume: number;
13
12
  }) => void;
@@ -7,18 +7,18 @@ const buffer_until_first_frame_js_1 = require("./buffer-until-first-frame.js");
7
7
  const buffering_js_1 = require("./buffering.js");
8
8
  const log_level_context_js_1 = require("./log-level-context.js");
9
9
  const log_js_1 = require("./log.js");
10
+ const media_tag_current_time_timestamp_js_1 = require("./media-tag-current-time-timestamp.js");
10
11
  const play_and_handle_not_allowed_error_js_1 = require("./play-and-handle-not-allowed-error.js");
11
12
  const playback_logging_js_1 = require("./playback-logging.js");
12
13
  const seek_js_1 = require("./seek.js");
13
14
  const timeline_position_state_js_1 = require("./timeline-position-state.js");
14
- const use_amplification_js_1 = require("./use-amplification.js");
15
15
  const use_current_frame_js_1 = require("./use-current-frame.js");
16
16
  const use_media_buffering_js_1 = require("./use-media-buffering.js");
17
17
  const use_request_video_callback_time_js_1 = require("./use-request-video-callback-time.js");
18
18
  const use_video_config_js_1 = require("./use-video-config.js");
19
19
  const get_current_time_js_1 = require("./video/get-current-time.js");
20
20
  const warn_about_non_seekable_media_js_1 = require("./warn-about-non-seekable-media.js");
21
- const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybackRate, onlyWarnForMediaSeekingError, acceptableTimeshift, pauseWhenBuffering, isPremounting, onAutoPlayError, userPreferredVolume, }) => {
21
+ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybackRate, onlyWarnForMediaSeekingError, acceptableTimeshift, pauseWhenBuffering, isPremounting, onAutoPlayError, }) => {
22
22
  const { playbackRate: globalPlaybackRate } = (0, react_1.useContext)(timeline_position_state_js_1.TimelineContext);
23
23
  const frame = (0, use_current_frame_js_1.useCurrentFrame)();
24
24
  const absoluteFrame = (0, timeline_position_state_js_1.useTimelinePosition)();
@@ -38,15 +38,19 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
38
38
  if (!src) {
39
39
  return;
40
40
  }
41
+ if (isVariableFpsVideoMap.current[src]) {
42
+ return;
43
+ }
41
44
  log_js_1.Log.verbose(logLevel, `Detected ${src} as a variable FPS video. Disabling buffering while seeking.`);
42
45
  isVariableFpsVideoMap.current[src] = true;
43
46
  }, [logLevel, src]);
44
- const currentTime = (0, use_request_video_callback_time_js_1.useRequestVideoCallbackTime)({
47
+ const rvcCurrentTime = (0, use_request_video_callback_time_js_1.useRequestVideoCallbackTime)({
45
48
  mediaRef,
46
49
  mediaType,
47
50
  lastSeek,
48
51
  onVariableFpsVideoDetected,
49
52
  });
53
+ const mediaTagCurrentTime = (0, media_tag_current_time_timestamp_js_1.useCurrentTimeOfMediaTagWithUpdateTimeStamp)(mediaRef);
50
54
  const desiredUnclampedTime = (0, get_current_time_js_1.getMediaTime)({
51
55
  frame,
52
56
  playbackRate: localPlaybackRate,
@@ -76,9 +80,7 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
76
80
  const DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_NORMAL_PLAYBACK = 0.45;
77
81
  // If there is amplification, the acceptable timeshift is higher
78
82
  const DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_AMPLIFICATION = DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_NORMAL_PLAYBACK + 0.2;
79
- const defaultAcceptableTimeshift = (0, use_amplification_js_1.getShouldAmplify)(userPreferredVolume)
80
- ? DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_AMPLIFICATION
81
- : DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_NORMAL_PLAYBACK;
83
+ const defaultAcceptableTimeshift = DEFAULT_ACCEPTABLE_TIMESHIFT_WITH_AMPLIFICATION;
82
84
  // For short audio, a lower acceptable time shift is used
83
85
  if ((_a = mediaRef.current) === null || _a === void 0 ? void 0 : _a.duration) {
84
86
  return Math.min(mediaRef.current.duration, acceptableTimeshift !== null && acceptableTimeshift !== void 0 ? acceptableTimeshift : defaultAcceptableTimeshift);
@@ -124,8 +126,17 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
124
126
  mountTime,
125
127
  playing,
126
128
  ]);
129
+ // This must be a useLayoutEffect, because afterwards, useVolume() looks at the playbackRate
130
+ // and it is also in a useLayoutEffect.
131
+ (0, react_1.useLayoutEffect)(() => {
132
+ const playbackRateToSet = Math.max(0, playbackRate);
133
+ if (mediaRef.current &&
134
+ mediaRef.current.playbackRate !== playbackRateToSet) {
135
+ mediaRef.current.playbackRate = playbackRateToSet;
136
+ }
137
+ }, [mediaRef, playbackRate]);
127
138
  (0, react_1.useEffect)(() => {
128
- var _a;
139
+ var _a, _b, _c;
129
140
  const tagName = mediaType === 'audio' ? '<Audio>' : '<Video>';
130
141
  if (!mediaRef.current) {
131
142
  throw new Error(`No ${mediaType} ref found`);
@@ -133,22 +144,22 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
133
144
  if (!src) {
134
145
  throw new Error(`No 'src' attribute was passed to the ${tagName} element.`);
135
146
  }
136
- const playbackRateToSet = Math.max(0, playbackRate);
137
- if (mediaRef.current.playbackRate !== playbackRateToSet) {
138
- mediaRef.current.playbackRate = playbackRateToSet;
139
- }
140
147
  const { duration } = mediaRef.current;
141
148
  const shouldBeTime = !Number.isNaN(duration) && Number.isFinite(duration)
142
149
  ? Math.min(duration, desiredUnclampedTime)
143
150
  : desiredUnclampedTime;
144
- const mediaTagTime = mediaRef.current.currentTime;
145
- const rvcTime = (_a = currentTime.current) !== null && _a !== void 0 ? _a : null;
151
+ const mediaTagTime = mediaTagCurrentTime.current.time;
152
+ const rvcTime = (_b = (_a = rvcCurrentTime.current) === null || _a === void 0 ? void 0 : _a.time) !== null && _b !== void 0 ? _b : null;
146
153
  const isVariableFpsVideo = isVariableFpsVideoMap.current[src];
147
154
  const timeShiftMediaTag = Math.abs(shouldBeTime - mediaTagTime);
148
155
  const timeShiftRvcTag = rvcTime ? Math.abs(shouldBeTime - rvcTime) : null;
149
- const timeShift = timeShiftRvcTag && !isVariableFpsVideo
156
+ const mostRecentTimeshift = ((_c = rvcCurrentTime.current) === null || _c === void 0 ? void 0 : _c.lastUpdate) &&
157
+ rvcCurrentTime.current.time > mediaTagCurrentTime.current.lastUpdate
150
158
  ? timeShiftRvcTag
151
159
  : timeShiftMediaTag;
160
+ const timeShift = timeShiftRvcTag && !isVariableFpsVideo
161
+ ? mostRecentTimeshift
162
+ : timeShiftMediaTag;
152
163
  if (timeShift > acceptableTimeShiftButLessThanDuration &&
153
164
  lastSeekDueToShift.current !== shouldBeTime) {
154
165
  // If scrubbing around, adjust timing
@@ -238,7 +249,7 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
238
249
  acceptableTimeShiftButLessThanDuration,
239
250
  bufferUntilFirstFrame,
240
251
  buffering.buffering,
241
- currentTime,
252
+ rvcCurrentTime,
242
253
  logLevel,
243
254
  desiredUnclampedTime,
244
255
  isBuffering,
@@ -253,6 +264,7 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
253
264
  isPremounting,
254
265
  pauseWhenBuffering,
255
266
  mountTime,
267
+ mediaTagCurrentTime,
256
268
  ]);
257
269
  };
258
270
  exports.useMediaPlayback = useMediaPlayback;
@@ -4,4 +4,7 @@ export declare const useRequestVideoCallbackTime: ({ mediaRef, mediaType, lastSe
4
4
  mediaType: "video" | "audio";
5
5
  lastSeek: React.MutableRefObject<number | null>;
6
6
  onVariableFpsVideoDetected: () => void;
7
- }) => RefObject<number | null>;
7
+ }) => RefObject<{
8
+ time: number;
9
+ lastUpdate: number;
10
+ } | null>;