@remotion/media 4.0.430 → 4.0.432

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/dist/audio/audio-for-preview.d.ts +1 -0
  2. package/dist/audio/audio-preview-iterator.d.ts +16 -9
  3. package/dist/audio/props.d.ts +1 -0
  4. package/dist/audio-iterator-manager.d.ts +24 -13
  5. package/dist/debug-overlay/preview-overlay.d.ts +24 -14
  6. package/dist/esm/index.mjs +755 -537
  7. package/dist/make-iterator-with-priming.d.ts +6 -0
  8. package/dist/media-player.d.ts +12 -7
  9. package/dist/prewarm-iterator-for-looping.d.ts +3 -2
  10. package/dist/set-global-time-anchor.d.ts +11 -0
  11. package/dist/shared-audio-context-for-media-player.d.ts +8 -0
  12. package/dist/use-common-effects.d.ts +32 -0
  13. package/dist/video/props.d.ts +1 -0
  14. package/dist/video/video-for-preview.d.ts +1 -0
  15. package/package.json +4 -4
  16. package/dist/audio/allow-wait.d.ts +0 -6
  17. package/dist/audio/allow-wait.js +0 -15
  18. package/dist/audio/audio-for-preview.js +0 -304
  19. package/dist/audio/audio-for-rendering.js +0 -194
  20. package/dist/audio/audio-preview-iterator.js +0 -176
  21. package/dist/audio/audio.js +0 -20
  22. package/dist/audio/props.js +0 -1
  23. package/dist/audio-extraction/audio-cache.js +0 -66
  24. package/dist/audio-extraction/audio-iterator.js +0 -132
  25. package/dist/audio-extraction/audio-manager.js +0 -113
  26. package/dist/audio-extraction/extract-audio.js +0 -132
  27. package/dist/audio-iterator-manager.js +0 -228
  28. package/dist/browser-can-use-webgl2.js +0 -13
  29. package/dist/caches.js +0 -61
  30. package/dist/calculate-playbacktime.js +0 -4
  31. package/dist/convert-audiodata/apply-volume.js +0 -17
  32. package/dist/convert-audiodata/combine-audiodata.js +0 -23
  33. package/dist/convert-audiodata/convert-audiodata.js +0 -73
  34. package/dist/convert-audiodata/resample-audiodata.js +0 -94
  35. package/dist/debug-overlay/preview-overlay.js +0 -42
  36. package/dist/extract-frame-and-audio.js +0 -101
  37. package/dist/get-sink.js +0 -15
  38. package/dist/get-time-in-seconds.js +0 -40
  39. package/dist/helpers/round-to-4-digits.js +0 -4
  40. package/dist/index.js +0 -12
  41. package/dist/is-type-of-error.js +0 -20
  42. package/dist/looped-frame.js +0 -10
  43. package/dist/media-player.js +0 -431
  44. package/dist/nonce-manager.js +0 -13
  45. package/dist/prewarm-iterator-for-looping.js +0 -56
  46. package/dist/render-timestamp-range.js +0 -9
  47. package/dist/show-in-timeline.js +0 -31
  48. package/dist/use-media-in-timeline.js +0 -103
  49. package/dist/video/props.js +0 -1
  50. package/dist/video/video-for-preview.js +0 -331
  51. package/dist/video/video-for-rendering.js +0 -263
  52. package/dist/video/video-preview-iterator.js +0 -122
  53. package/dist/video/video.js +0 -35
  54. package/dist/video-extraction/add-broadcast-channel-listener.js +0 -125
  55. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +0 -113
  56. package/dist/video-extraction/extract-frame.js +0 -85
  57. package/dist/video-extraction/get-allocation-size.js +0 -6
  58. package/dist/video-extraction/get-frames-since-keyframe.js +0 -108
  59. package/dist/video-extraction/keyframe-bank.js +0 -159
  60. package/dist/video-extraction/keyframe-manager.js +0 -206
  61. package/dist/video-extraction/remember-actual-matroska-timestamps.js +0 -19
  62. package/dist/video-extraction/rotate-frame.js +0 -34
  63. package/dist/video-iterator-manager.js +0 -109
@@ -1,194 +0,0 @@
1
- import { jsx as _jsx } from "react/jsx-runtime";
2
- import { useContext, useLayoutEffect, useMemo, useState } from 'react';
3
- import { cancelRender, Html5Audio, Internals, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
4
- import { useMaxMediaCacheSize } from '../caches';
5
- import { applyVolume } from '../convert-audiodata/apply-volume';
6
- import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
7
- import { frameForVolumeProp } from '../looped-frame';
8
- import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
9
/**
 * Rendering-mode implementation of <Audio>.
 *
 * Instead of playing audio, it extracts the exact audio samples for the
 * current frame via `extractFrameViaBroadcastChannel()` and registers them as
 * an `inline-audio` render asset. On extraction failures (unknown container,
 * undecodable stream, network error) it falls back to <Html5Audio> — unless
 * `disallowFallbackToHtml5Audio` is set or we are client-side rendering, in
 * which case the render is cancelled with a descriptive error.
 */
export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel ?? 'info', loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
    const frame = useCurrentFrame();
    const absoluteFrame = Internals.useTimelinePosition();
    const videoConfig = Internals.useUnsafeVideoConfig();
    const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
    const startsAt = Internals.useMediaStartsAt();
    const environment = useRemotionEnvironment();
    if (!videoConfig) {
        throw new Error('No video config found');
    }
    if (!src) {
        throw new TypeError('No `src` was passed to <Audio>.');
    }
    const { fps } = videoConfig;
    const { delayRender, continueRender } = useDelayRender();
    const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState(false);
    const sequenceContext = useContext(Internals.SequenceContext);
    // Generate a string that's as unique as possible for this asset
    // but at the same time the same on all threads
    const id = useMemo(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
        src,
        sequenceContext?.cumulatedFrom,
        sequenceContext?.relativeFrom,
        sequenceContext?.durationInFrames,
    ]);
    const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
    const audioEnabled = Internals.useAudioEnabled();
    useLayoutEffect(() => {
        // Extract exactly one frame's worth of audio starting at the playhead.
        const timestamp = frame / fps;
        const durationInSeconds = 1 / fps;
        const shouldRenderAudio = (() => {
            if (!audioEnabled) {
                return false;
            }
            if (muted) {
                return false;
            }
            return true;
        })();
        if (!shouldRenderAudio) {
            return;
        }
        if (replaceWithHtml5Audio) {
            return;
        }
        // Block the render until extraction has completed (or failed).
        const newHandle = delayRender(`Extracting audio for frame ${frame}`, {
            retries: delayRenderRetries ?? undefined,
            timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
        });
        extractFrameViaBroadcastChannel({
            src,
            timeInSeconds: timestamp,
            durationInSeconds,
            playbackRate: playbackRate ?? 1,
            logLevel: logLevel ?? window.remotion_logLevel,
            includeAudio: shouldRenderAudio,
            includeVideo: false,
            isClientSideRendering: environment.isClientSideRendering,
            loop: loop ?? false,
            audioStreamIndex: audioStreamIndex ?? 0,
            trimAfter,
            trimBefore,
            fps,
            maxCacheSize,
        })
            .then((result) => {
            // Client-side rendering has no <Html5Audio> fallback path: fail hard.
            if (result.type === 'unknown-container-format') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render audio "${src}": Unknown container format. See supported formats: https://www.remotion.dev/docs/mediabunny/formats`));
                    return;
                }
                if (disallowFallbackToHtml5Audio) {
                    cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
                }
                Internals.Log.warn({
                    logLevel: logLevel ?? window.remotion_logLevel,
                    tag: '@remotion/media',
                }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
                setReplaceWithHtml5Audio(true);
                return;
            }
            if (result.type === 'cannot-decode') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render audio "${src}": The audio could not be decoded by the browser.`));
                    return;
                }
                if (disallowFallbackToHtml5Audio) {
                    cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
                }
                Internals.Log.warn({
                    logLevel: logLevel ?? window.remotion_logLevel,
                    tag: '@remotion/media',
                }, `Cannot decode ${src}, falling back to <Html5Audio>`);
                setReplaceWithHtml5Audio(true);
                return;
            }
            // Audio has no alpha channel; reaching this branch indicates a bug upstream.
            if (result.type === 'cannot-decode-alpha') {
                throw new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToHtml5Audio' was set. But this should never happen, since you used the <Audio> tag. Please report this as a bug.`);
            }
            if (result.type === 'network-error') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render audio "${src}": Network error while fetching the audio (possibly CORS).`));
                    return;
                }
                if (disallowFallbackToHtml5Audio) {
                    cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
                }
                Internals.Log.warn({
                    logLevel: logLevel ?? window.remotion_logLevel,
                    tag: '@remotion/media',
                }, `Network error fetching ${src}, falling back to <Html5Audio>`);
                setReplaceWithHtml5Audio(true);
                return;
            }
            // Success: evaluate the volume for this frame and register the samples.
            const { audio, durationInSeconds: assetDurationInSeconds } = result;
            const volumePropsFrame = frameForVolumeProp({
                behavior: loopVolumeCurveBehavior ?? 'repeat',
                loop: loop ?? false,
                assetDurationInSeconds: assetDurationInSeconds ?? 0,
                fps,
                frame,
                startsAt,
            });
            const volume = Internals.evaluateVolume({
                volume: volumeProp,
                frame: volumePropsFrame,
                mediaVolume: 1,
            });
            Internals.warnAboutTooHighVolume(volume);
            if (audio && volume > 0) {
                applyVolume(audio.data, volume);
                registerRenderAsset({
                    type: 'inline-audio',
                    id,
                    // Typed arrays cannot cross the serialization boundary during
                    // server-side rendering, so convert to a plain array there.
                    audio: environment.isClientSideRendering
                        ? audio.data
                        : Array.from(audio.data),
                    frame: absoluteFrame,
                    timestamp: audio.timestamp,
                    // Duration in microseconds.
                    duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
                    toneFrequency: toneFrequency ?? 1,
                });
            }
            continueRender(newHandle);
        })
            .catch((error) => {
            cancelRender(error);
        });
        // Cleanup: release the delayRender handle and deregister the asset.
        return () => {
            continueRender(newHandle);
            unregisterRenderAsset(id);
        };
    }, [
        absoluteFrame,
        continueRender,
        delayRender,
        delayRenderRetries,
        delayRenderTimeoutInMilliseconds,
        disallowFallbackToHtml5Audio,
        environment.isClientSideRendering,
        fps,
        frame,
        id,
        logLevel,
        loop,
        loopVolumeCurveBehavior,
        muted,
        playbackRate,
        registerRenderAsset,
        src,
        startsAt,
        unregisterRenderAsset,
        volumeProp,
        audioStreamIndex,
        toneFrequency,
        trimAfter,
        trimBefore,
        replaceWithHtml5Audio,
        maxCacheSize,
        audioEnabled,
    ]);
    if (replaceWithHtml5Audio) {
        return (_jsx(Html5Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
    }
    // Audio contributes no visible output.
    return null;
};
@@ -1,176 +0,0 @@
1
- import { roundTo4Digits } from '../helpers/round-to-4-digits';
2
- import { allowWaitRoutine } from './allow-wait';
3
// Threshold, in seconds, below which the scheduled audio buffer is no longer
// considered "healthy" (consumed by callers of this module).
export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
/**
 * Creates a stateful wrapper around an audio sample iterator obtained from
 * `cache.makeIteratorOrUsePrewarmed(startFromSecond)`.
 *
 * It tracks three pieces of state:
 *  - `mostRecentTimestamp`: the end time of the newest buffer seen so far,
 *  - `queuedAudioNodes`: WebAudio nodes currently scheduled for playback,
 *  - `audioChunksForAfterResuming`: chunks parked while playback is paused.
 */
export const makeAudioIterator = (startFromSecond, cache) => {
    let destroyed = false;
    const iterator = cache.makeIteratorOrUsePrewarmed(startFromSecond);
    const queuedAudioNodes = [];
    const audioChunksForAfterResuming = [];
    let mostRecentTimestamp = -Infinity;
    // Stop and drop every scheduled node.
    const cleanupAudioQueue = () => {
        for (const node of queuedAudioNodes) {
            node.node.stop();
        }
        queuedAudioNodes.length = 0;
    };
    // Pull the next buffer without stalling: when `allowWait` is not given, the
    // iterator result is raced against an already-resolved microtask so a
    // still-pending decode yields 'need-to-wait-for-it' instead of blocking.
    const getNextOrNullIfNotAvailable = async (allowWait) => {
        const next = iterator.next();
        const result = allowWait
            ? await allowWaitRoutine(next, allowWait)
            : await Promise.race([
                next,
                new Promise((resolve) => {
                    Promise.resolve().then(() => resolve());
                }),
            ]);
        if (!result) {
            return {
                type: 'need-to-wait-for-it',
                // Caller may await the original promise later.
                waitPromise: async () => {
                    const res = await next;
                    return res.value;
                },
            };
        }
        if (result.value) {
            mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);
            return {
                type: 'got-buffer',
                buffer: result.value,
            };
        }
        return {
            type: 'got-end',
            mostRecentTimestamp,
        };
    };
    // Advance the iterator until a buffer covering `time` has been scheduled
    // via `onBufferScheduled`. Returns why (or whether) the seek was satisfied.
    const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
        // Forward-only iterator: times before its start can never be served.
        if (time < startFromSecond) {
            return {
                type: 'not-satisfied',
                reason: `time requested is before the start of the iterator`,
            };
        }
        while (true) {
            const buffer = await getNextOrNullIfNotAvailable(allowWait);
            if (buffer.type === 'need-to-wait-for-it') {
                return {
                    type: 'not-satisfied',
                    reason: 'iterator did not have buffer ready',
                };
            }
            if (buffer.type === 'got-end') {
                if (time >= mostRecentTimestamp) {
                    return {
                        type: 'ended',
                    };
                }
                return {
                    type: 'not-satisfied',
                    reason: `iterator ended before the requested time`,
                };
            }
            if (buffer.type === 'got-buffer') {
                // Compare at 4-digit precision to absorb floating-point drift.
                const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
                const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
                const timestamp = roundTo4Digits(time);
                if (roundTo4Digits(time) < bufferTimestamp) {
                    return {
                        type: 'not-satisfied',
                        reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`,
                    };
                }
                // Buffer covers the requested time: schedule it and finish.
                if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
                    onBufferScheduled(buffer.buffer);
                    return {
                        type: 'satisfied',
                    };
                }
                // Buffer is entirely before `time`: schedule it and keep going.
                onBufferScheduled(buffer.buffer);
                continue;
            }
            throw new Error('Unreachable');
        }
    };
    // Stop all scheduled nodes and hand them back to the caller.
    const removeAndReturnAllQueuedAudioNodes = () => {
        const nodes = queuedAudioNodes.slice();
        for (const node of nodes) {
            node.node.stop();
        }
        queuedAudioNodes.length = 0;
        return nodes;
    };
    const addChunkForAfterResuming = (buffer, timestamp) => {
        audioChunksForAfterResuming.push({ buffer, timestamp });
    };
    // On pause: move everything that was scheduled into the resume queue.
    const moveQueuedChunksToPauseQueue = () => {
        const toQueue = removeAndReturnAllQueuedAudioNodes();
        for (const chunk of toQueue) {
            addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
        }
    };
    const getNumberOfChunksAfterResuming = () => {
        return audioChunksForAfterResuming.length;
    };
    return {
        destroy: () => {
            cleanupAudioQueue();
            destroyed = true;
            // Best effort: close the underlying iterator, ignore failures.
            iterator.return().catch(() => undefined);
            audioChunksForAfterResuming.length = 0;
        },
        // Blocking variant of getNextOrNullIfNotAvailable: always awaits.
        getNext: async () => {
            const next = await iterator.next();
            if (next.value) {
                mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);
            }
            return next;
        },
        isDestroyed: () => {
            return destroyed;
        },
        addQueuedAudioNode: (node, timestamp, buffer) => {
            queuedAudioNodes.push({ node, timestamp, buffer });
        },
        removeQueuedAudioNode: (node) => {
            const index = queuedAudioNodes.findIndex((n) => n.node === node);
            if (index !== -1) {
                queuedAudioNodes.splice(index, 1);
            }
        },
        getAndClearAudioChunksForAfterResuming: () => {
            const chunks = audioChunksForAfterResuming.slice();
            audioChunksForAfterResuming.length = 0;
            return chunks;
        },
        // Union of the time ranges covered by scheduled nodes and parked
        // chunks, or null when nothing is queued.
        getQueuedPeriod: () => {
            let until = -Infinity;
            let from = Infinity;
            for (const node of queuedAudioNodes) {
                until = Math.max(until, node.timestamp + node.buffer.duration);
                from = Math.min(from, node.timestamp);
            }
            for (const chunk of audioChunksForAfterResuming) {
                until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
                from = Math.min(from, chunk.timestamp);
            }
            if (!Number.isFinite(from) || !Number.isFinite(until)) {
                return null;
            }
            return {
                from,
                until,
            };
        },
        tryToSatisfySeek,
        addChunkForAfterResuming,
        moveQueuedChunksToPauseQueue,
        getNumberOfChunksAfterResuming,
    };
};
171
/**
 * Whether `time` falls inside the half-open interval [from, until) that is
 * already covered by queued audio. A nullish period means nothing is queued.
 */
export const isAlreadyQueued = (time, queuedPeriod) => {
    if (!queuedPeriod) {
        return false;
    }
    const { from, until } = queuedPeriod;
    return from <= time && time < until;
};
@@ -1,20 +0,0 @@
1
- import { jsx as _jsx } from "react/jsx-runtime";
2
- import { Internals, useRemotionEnvironment } from 'remotion';
3
- import { AudioForPreview } from './audio-for-preview';
4
- import { AudioForRendering } from './audio-for-rendering';
5
const { validateMediaProps } = Internals;
/**
 * Environment-dispatching <Audio> component: validates the media props, then
 * renders <AudioForRendering> during renders and <AudioForPreview> otherwise.
 */
export const Audio = (props) => {
    // `name`, `stack` and `showInTimeline` are consumed at this level;
    // every remaining prop is drilled down unchanged.
    const { name, stack, showInTimeline, ...passthrough } = props;
    const environment = useRemotionEnvironment();
    if (typeof props.src !== 'string') {
        throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
    }
    validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, 'Audio');
    return environment.isRendering
        ? _jsx(AudioForRendering, { ...passthrough })
        : _jsx(AudioForPreview, { name: name, ...passthrough, stack: stack ?? null });
};
Internals.addSequenceStackTraces(Audio);
@@ -1 +0,0 @@
1
// No runtime exports — presumably this module only carries type declarations
// (see props.d.ts in the file list); the statement keeps it a valid ES module.
export {};
@@ -1,66 +0,0 @@
1
/**
 * In-memory cache of decoded audio samples, keyed by timestamp and kept in
 * insertion order (samples are added in increasing timestamp order).
 * Each sample is expected to expose `timestamp`, `duration` and `close()`.
 */
export const makeAudioCache = () => {
    const timestamps = [];
    const samples = {};
    // Add a decoded sample to the cache.
    const addFrame = (sample) => {
        timestamps.push(sample.timestamp);
        samples[sample.timestamp] = sample;
    };
    // Close and evict every sample that ends before `threshold`, except the
    // newest one (kept so the end of the media stays resolvable).
    // Fix: the previous implementation spliced via indexOf() inside the loop,
    // which is O(n²); this is a single keep/evict pass.
    const clearBeforeThreshold = (threshold) => {
        const newest = timestamps[timestamps.length - 1];
        const kept = [];
        for (const timestamp of timestamps) {
            const sample = samples[timestamp];
            if (timestamp !== newest && timestamp + sample.duration < threshold) {
                sample.close();
                delete samples[timestamp];
            }
            else {
                kept.push(timestamp);
            }
        }
        // Mutate in place: callers may hold the array from getOpenTimestamps().
        timestamps.length = 0;
        timestamps.push(...kept);
    };
    // Close and evict everything.
    const deleteAll = () => {
        for (const timestamp of timestamps) {
            samples[timestamp].close();
            delete samples[timestamp];
        }
        timestamps.length = 0;
    };
    // All cached samples overlapping [timestamp, timestamp + durationInSeconds),
    // with a tiny epsilon to absorb floating-point drift at the boundaries.
    const getSamples = (timestamp, durationInSeconds) => {
        const selected = [];
        for (let i = 0; i < timestamps.length; i++) {
            const sampleTimestamp = timestamps[i];
            const sample = samples[sampleTimestamp];
            if (sample.timestamp + sample.duration - 0.0000000001 <= timestamp) {
                continue;
            }
            if (sample.timestamp >= timestamp + durationInSeconds - 0.0000000001) {
                break;
            }
            selected.push(sample);
        }
        return selected;
    };
    // Live reference to the ordered timestamp list (not a copy).
    const getOpenTimestamps = () => {
        return timestamps;
    };
    // Start time of the oldest cached sample (undefined when empty).
    const getOldestTimestamp = () => {
        return timestamps[0];
    };
    // End time (start + duration) of the newest cached sample, or null when empty.
    const getNewestTimestamp = () => {
        if (timestamps.length === 0) {
            return null;
        }
        const sample = samples[timestamps[timestamps.length - 1]];
        return sample.timestamp + sample.duration;
    };
    return {
        addFrame,
        clearBeforeThreshold,
        deleteAll,
        getSamples,
        getOldestTimestamp,
        getNewestTimestamp,
        getOpenTimestamps,
    };
};
@@ -1,132 +0,0 @@
1
- import { Internals } from 'remotion';
2
- import { SAFE_BACK_WINDOW_IN_SECONDS } from '../caches';
3
- import { makeAudioCache } from './audio-cache';
4
// https://discord.com/channels/@me/1409810025844838481/1415028953093111870
// Audio frames might have dependencies on previous and next frames so we need to decode a bit more
// and then discard it.
// The worst case seems to be FLAC files with a 65'535 sample window, which would be 1486.0ms at 44.1Khz.
// So let's set a threshold of 1.5 seconds.
const extraThreshold = 1.5;
// Remembers which sources already triggered the Matroska warning, so the
// message is logged at most once per src.
const warned = {};
const warnAboutMatroskaOnce = (src, logLevel) => {
    if (!warned[src]) {
        warned[src] = true;
        Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
    }
};
18
/**
 * Creates a forward-decoding iterator over `audioSampleSink`, backed by an
 * in-memory sample cache (`makeAudioCache`). All `getSamples()` calls are
 * serialized through a promise chain so decoding is never interleaved.
 */
export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp, src, actualMatroskaTimestamps, logLevel, }) => {
    // Matroska timestamps are not accurate unless we start from the beginning
    // So for matroska, we need to decode all samples :(
    // https://github.com/Vanilagy/mediabunny/issues/105
    const sampleIterator = audioSampleSink.samples(isMatroska ? 0 : Math.max(0, startTimestamp - extraThreshold));
    if (isMatroska) {
        warnAboutMatroskaOnce(src, logLevel);
    }
    // End time of the media, known only once the iterator has been exhausted.
    let fullDuration = null;
    const cache = makeAudioCache();
    let lastUsed = Date.now();
    // Decode the next sample, correct its timestamp against the observed
    // Matroska timestamps, and add it to the cache. Returns null at the end.
    const getNextSample = async () => {
        lastUsed = Date.now();
        const { value: sample, done } = await sampleIterator.next();
        if (done) {
            fullDuration = cache.getNewestTimestamp();
            return null;
        }
        const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);
        if (realTimestamp !== null && realTimestamp !== sample.timestamp) {
            sample.setTimestamp(realTimestamp);
        }
        actualMatroskaTimestamps.observeTimestamp(sample.timestamp);
        actualMatroskaTimestamps.observeTimestamp(sample.timestamp + sample.duration);
        cache.addFrame(sample);
        return sample;
    };
    // Return all samples overlapping [timestamp, timestamp + durationInSeconds),
    // decoding further samples on demand and evicting old ones as we go.
    const getSamples = async (timestamp, durationInSeconds) => {
        lastUsed = Date.now();
        // Request is past the end of the media: nothing to return.
        if (fullDuration !== null && timestamp > fullDuration) {
            // Clear all samples before the timestamp
            // Do this in the while loop because samples might start from 0
            cache.clearBeforeThreshold(fullDuration - SAFE_BACK_WINDOW_IN_SECONDS);
            return [];
        }
        const samples = cache.getSamples(timestamp, durationInSeconds);
        const newestTimestamp = cache.getNewestTimestamp();
        if (newestTimestamp !== null) {
            // Cache already covers the whole requested window (epsilon-adjusted).
            if (newestTimestamp >= timestamp + durationInSeconds - 0.0000000001) {
                return samples;
            }
        }
        while (true) {
            const sample = await getNextSample();
            // Clear all samples before the timestamp
            // Do this in the while loop because samples might start from 0
            // Also do this after a sample has just been added, if it was the last sample we now have the duration
            // and can prevent deleting the last sample
            const deleteBefore = fullDuration === null ? timestamp : Math.min(timestamp, fullDuration);
            cache.clearBeforeThreshold(deleteBefore - SAFE_BACK_WINDOW_IN_SECONDS);
            if (sample === null) {
                break;
            }
            // Sample ends before the window: skip.
            if (sample.timestamp + sample.duration - 0.0000000001 <= timestamp) {
                continue;
            }
            // Sample starts after the window: done.
            if (sample.timestamp >= timestamp + durationInSeconds - 0.0000000001) {
                break;
            }
            samples.push(sample);
        }
        return samples;
    };
    // Verbose-log the currently cached timestamp range for this src.
    const logOpenFrames = () => {
        const openTimestamps = cache.getOpenTimestamps();
        if (openTimestamps.length > 0) {
            const first = openTimestamps[0];
            const last = openTimestamps[openTimestamps.length - 1];
            Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Open audio samples for src', src, `${first.toFixed(3)}...${last.toFixed(3)}`);
        }
    };
    // NOTE(review): `size` sums the timestamps themselves, not byte sizes —
    // looks intentional only as a rough stat; confirm against consumers.
    const getCacheStats = () => {
        return {
            count: cache.getOpenTimestamps().length,
            size: cache.getOpenTimestamps().reduce((acc, t) => acc + t, 0),
        };
    };
    // Whether a request at `timestamp` can be served without restarting the
    // iterator: the oldest cached sample must lie before it, within 10 seconds.
    const canSatisfyRequestedTime = (timestamp) => {
        const oldestTimestamp = cache.getOldestTimestamp() ?? startTimestamp;
        if (fullDuration !== null && timestamp > fullDuration) {
            return true;
        }
        return (oldestTimestamp < timestamp && Math.abs(oldestTimestamp - timestamp) < 10);
    };
    // Release everything: close cached samples and the underlying iterator.
    const prepareForDeletion = () => {
        cache.deleteAll();
        sampleIterator.return().then((value) => {
            if (value.value) {
                value.value.close();
            }
        });
        fullDuration = null;
    };
    // Serializes all getSamples() calls so they run one at a time, in order.
    let op = Promise.resolve([]);
    return {
        src,
        getSamples: (ts, dur) => {
            op = op.then(() => getSamples(ts, dur));
            return op;
        },
        waitForCompletion: async () => {
            await op;
            return true;
        },
        canSatisfyRequestedTime,
        logOpenFrames,
        getCacheStats,
        getLastUsed: () => lastUsed,
        prepareForDeletion,
        startTimestamp,
        clearBeforeThreshold: cache.clearBeforeThreshold,
        getOldestTimestamp: cache.getOldestTimestamp,
        getNewestTimestamp: cache.getNewestTimestamp,
    };
};