@remotion/media 4.0.428 → 4.0.430

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/README.md +7 -7
  2. package/dist/audio/allow-wait.js +15 -0
  3. package/dist/audio/audio-for-preview.d.ts +0 -1
  4. package/dist/audio/audio-for-preview.js +304 -0
  5. package/dist/audio/audio-for-rendering.js +194 -0
  6. package/dist/audio/audio-preview-iterator.d.ts +4 -2
  7. package/dist/audio/audio-preview-iterator.js +176 -0
  8. package/dist/audio/audio.js +20 -0
  9. package/dist/audio/props.js +1 -0
  10. package/dist/audio-extraction/audio-cache.js +66 -0
  11. package/dist/audio-extraction/audio-iterator.js +132 -0
  12. package/dist/audio-extraction/audio-manager.js +113 -0
  13. package/dist/audio-extraction/extract-audio.js +132 -0
  14. package/dist/audio-iterator-manager.d.ts +10 -9
  15. package/dist/audio-iterator-manager.js +228 -0
  16. package/dist/browser-can-use-webgl2.js +13 -0
  17. package/dist/caches.js +61 -0
  18. package/dist/calculate-playbacktime.js +4 -0
  19. package/dist/convert-audiodata/apply-volume.js +17 -0
  20. package/dist/convert-audiodata/combine-audiodata.js +23 -0
  21. package/dist/convert-audiodata/convert-audiodata.js +73 -0
  22. package/dist/convert-audiodata/resample-audiodata.js +94 -0
  23. package/dist/debug-overlay/preview-overlay.d.ts +9 -7
  24. package/dist/debug-overlay/preview-overlay.js +42 -0
  25. package/dist/esm/index.mjs +246 -103
  26. package/dist/extract-frame-and-audio.js +101 -0
  27. package/dist/get-sink.js +15 -0
  28. package/dist/get-time-in-seconds.js +40 -0
  29. package/dist/helpers/round-to-4-digits.js +4 -0
  30. package/dist/index.js +12 -0
  31. package/dist/is-type-of-error.js +20 -0
  32. package/dist/looped-frame.js +10 -0
  33. package/dist/media-player.d.ts +9 -5
  34. package/dist/media-player.js +431 -0
  35. package/dist/nonce-manager.js +13 -0
  36. package/dist/prewarm-iterator-for-looping.js +56 -0
  37. package/dist/render-timestamp-range.js +9 -0
  38. package/dist/show-in-timeline.js +31 -0
  39. package/dist/use-media-in-timeline.d.ts +3 -2
  40. package/dist/use-media-in-timeline.js +103 -0
  41. package/dist/video/props.js +1 -0
  42. package/dist/video/video-for-preview.js +331 -0
  43. package/dist/video/video-for-rendering.js +263 -0
  44. package/dist/video/video-preview-iterator.js +122 -0
  45. package/dist/video/video.js +35 -0
  46. package/dist/video-extraction/add-broadcast-channel-listener.js +125 -0
  47. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +113 -0
  48. package/dist/video-extraction/extract-frame.js +85 -0
  49. package/dist/video-extraction/get-allocation-size.js +6 -0
  50. package/dist/video-extraction/get-frames-since-keyframe.js +108 -0
  51. package/dist/video-extraction/keyframe-bank.js +159 -0
  52. package/dist/video-extraction/keyframe-manager.js +206 -0
  53. package/dist/video-extraction/remember-actual-matroska-timestamps.js +19 -0
  54. package/dist/video-extraction/rotate-frame.js +34 -0
  55. package/dist/video-iterator-manager.js +109 -0
  56. package/package.json +7 -5
@@ -0,0 +1,176 @@
1
+ import { roundTo4Digits } from '../helpers/round-to-4-digits';
2
+ import { allowWaitRoutine } from './allow-wait';
3
// Minimum amount of audio (in seconds) that should be queued ahead of the
// playhead for the buffer to be considered healthy.
export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
/**
 * Creates a stateful wrapper around an async audio-buffer iterator that
 * starts at `startFromSecond`. It tracks the audio nodes that were scheduled
 * from the iterator, plus chunks held back while playback is paused.
 *
 * @param startFromSecond - media time the underlying iterator starts at
 * @param cache - provides `makeIteratorOrUsePrewarmed(startFromSecond)`
 */
export const makeAudioIterator = (startFromSecond, cache) => {
    let destroyed = false;
    const iterator = cache.makeIteratorOrUsePrewarmed(startFromSecond);
    // Currently scheduled audio nodes: {node, timestamp, buffer}.
    const queuedAudioNodes = [];
    // Chunks to re-schedule after resuming from pause: {buffer, timestamp}.
    const audioChunksForAfterResuming = [];
    // End time (timestamp + duration) of the latest buffer pulled so far.
    let mostRecentTimestamp = -Infinity;
    // Stop every scheduled node and empty the queue.
    const cleanupAudioQueue = () => {
        for (const node of queuedAudioNodes) {
            node.node.stop();
        }
        queuedAudioNodes.length = 0;
    };
    // Pull the next buffer. Without `allowWait`, the iterator is raced against
    // an already-resolved microtask, so a buffer that is not immediately
    // available yields 'need-to-wait-for-it' instead of blocking.
    const getNextOrNullIfNotAvailable = async (allowWait) => {
        const next = iterator.next();
        const result = allowWait
            ? await allowWaitRoutine(next, allowWait)
            : await Promise.race([
                next,
                new Promise((resolve) => {
                    Promise.resolve().then(() => resolve());
                }),
            ]);
        if (!result) {
            // Buffer not ready yet; hand the caller a way to await it.
            return {
                type: 'need-to-wait-for-it',
                waitPromise: async () => {
                    const res = await next;
                    return res.value;
                },
            };
        }
        if (result.value) {
            mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);
            return {
                type: 'got-buffer',
                buffer: result.value,
            };
        }
        // Iterator is exhausted.
        return {
            type: 'got-end',
            mostRecentTimestamp,
        };
    };
    // Advance the iterator until a buffer containing `time` has been handed to
    // `onBufferScheduled`. Returns 'satisfied', 'ended' (time is past the media
    // end) or 'not-satisfied' with a reason. Buffers passed over on the way are
    // also handed to `onBufferScheduled`.
    const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
        if (time < startFromSecond) {
            // The iterator only moves forward; it can never reach a time
            // before its own start.
            return {
                type: 'not-satisfied',
                reason: `time requested is before the start of the iterator`,
            };
        }
        while (true) {
            const buffer = await getNextOrNullIfNotAvailable(allowWait);
            if (buffer.type === 'need-to-wait-for-it') {
                return {
                    type: 'not-satisfied',
                    reason: 'iterator did not have buffer ready',
                };
            }
            if (buffer.type === 'got-end') {
                if (time >= mostRecentTimestamp) {
                    return {
                        type: 'ended',
                    };
                }
                return {
                    type: 'not-satisfied',
                    reason: `iterator ended before the requested time`,
                };
            }
            if (buffer.type === 'got-buffer') {
                // Compare with 4-digit rounding to absorb floating-point noise.
                const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
                const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
                const timestamp = roundTo4Digits(time);
                if (roundTo4Digits(time) < bufferTimestamp) {
                    // The iterator has already advanced past the target time.
                    return {
                        type: 'not-satisfied',
                        reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`,
                    };
                }
                if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
                    // This buffer covers the requested time - done.
                    onBufferScheduled(buffer.buffer);
                    return {
                        type: 'satisfied',
                    };
                }
                // Buffer is earlier than the target: schedule it and continue.
                onBufferScheduled(buffer.buffer);
                continue;
            }
            throw new Error('Unreachable');
        }
    };
    // Stop all queued nodes and return their records; the queue is emptied.
    const removeAndReturnAllQueuedAudioNodes = () => {
        const nodes = queuedAudioNodes.slice();
        for (const node of nodes) {
            node.node.stop();
        }
        queuedAudioNodes.length = 0;
        return nodes;
    };
    const addChunkForAfterResuming = (buffer, timestamp) => {
        audioChunksForAfterResuming.push({ buffer, timestamp });
    };
    // On pause: convert the currently scheduled nodes into chunks that will be
    // re-scheduled when playback resumes.
    const moveQueuedChunksToPauseQueue = () => {
        const toQueue = removeAndReturnAllQueuedAudioNodes();
        for (const chunk of toQueue) {
            addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
        }
    };
    const getNumberOfChunksAfterResuming = () => {
        return audioChunksForAfterResuming.length;
    };
    return {
        // Stop playback, release the iterator and drop all pending chunks.
        destroy: () => {
            cleanupAudioQueue();
            destroyed = true;
            iterator.return().catch(() => undefined);
            audioChunksForAfterResuming.length = 0;
        },
        // Await the next buffer unconditionally, tracking the furthest
        // end-timestamp seen so far.
        getNext: async () => {
            const next = await iterator.next();
            if (next.value) {
                mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);
            }
            return next;
        },
        isDestroyed: () => {
            return destroyed;
        },
        addQueuedAudioNode: (node, timestamp, buffer) => {
            queuedAudioNodes.push({ node, timestamp, buffer });
        },
        removeQueuedAudioNode: (node) => {
            const index = queuedAudioNodes.findIndex((n) => n.node === node);
            if (index !== -1) {
                queuedAudioNodes.splice(index, 1);
            }
        },
        // Drain the pause queue; the caller re-schedules the returned chunks.
        getAndClearAudioChunksForAfterResuming: () => {
            const chunks = audioChunksForAfterResuming.slice();
            audioChunksForAfterResuming.length = 0;
            return chunks;
        },
        // Time span covered by scheduled nodes plus paused chunks, or null
        // when nothing is queued.
        getQueuedPeriod: () => {
            let until = -Infinity;
            let from = Infinity;
            for (const node of queuedAudioNodes) {
                until = Math.max(until, node.timestamp + node.buffer.duration);
                from = Math.min(from, node.timestamp);
            }
            for (const chunk of audioChunksForAfterResuming) {
                until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
                from = Math.min(from, chunk.timestamp);
            }
            if (!Number.isFinite(from) || !Number.isFinite(until)) {
                return null;
            }
            return {
                from,
                until,
            };
        },
        tryToSatisfySeek,
        addChunkForAfterResuming,
        moveQueuedChunksToPauseQueue,
        getNumberOfChunksAfterResuming,
    };
};
171
/**
 * Whether `time` already falls inside the half-open interval
 * [from, until) covered by queued audio.
 *
 * @param time - media time in seconds
 * @param queuedPeriod - `{from, until}` from `getQueuedPeriod()`, or null
 * @returns true when the time is already covered by the queue
 */
export const isAlreadyQueued = (time, queuedPeriod) => {
    if (!queuedPeriod) {
        return false;
    }
    const { from, until } = queuedPeriod;
    return from <= time && time < until;
};
@@ -0,0 +1,20 @@
1
+ import { jsx as _jsx } from "react/jsx-runtime";
2
+ import { Internals, useRemotionEnvironment } from 'remotion';
3
+ import { AudioForPreview } from './audio-for-preview';
4
+ import { AudioForRendering } from './audio-for-rendering';
5
const { validateMediaProps } = Internals;
/**
 * Public `<Audio>` component: validates props, then renders either the
 * rendering or the preview implementation depending on the current
 * Remotion environment.
 */
export const Audio = (props) => {
    // Should only destruct `trimBefore` and `trimAfter` from props,
    // rest gets drilled down
    // NOTE(review): `showInTimeline` is pulled out here and forwarded to
    // neither implementation - confirm it is intentionally dropped.
    const { name, stack, showInTimeline, ...otherProps } = props;
    const environment = useRemotionEnvironment();
    if (typeof props.src !== 'string') {
        throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
    }
    // Throws on invalid playbackRate/volume values.
    validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, 'Audio');
    if (environment.isRendering) {
        return _jsx(AudioForRendering, { ...otherProps });
    }
    return _jsx(AudioForPreview, { name: name, ...otherProps, stack: stack ?? null });
};
// Register so sequences rendered by <Audio> get useful stack traces.
Internals.addSequenceStackTraces(Audio);
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,66 @@
1
/**
 * In-order cache of decoded audio samples, keyed by their timestamp.
 * Samples are expected to arrive in presentation order. Eviction always
 * keeps the most recently added sample alive, so the furthest decoded
 * position stays known.
 */
export const makeAudioCache = () => {
    // Tolerance for floating-point range comparisons.
    const EPSILON = 0.0000000001;
    // Timestamps in insertion (presentation) order.
    const order = [];
    // timestamp -> sample lookup.
    const byTimestamp = {};
    // Register a freshly decoded sample.
    const addFrame = (sample) => {
        order.push(sample.timestamp);
        byTimestamp[sample.timestamp] = sample;
    };
    // Close and drop every sample that ends before `threshold`, except the
    // newest one, which must survive so the decode frontier stays known.
    const clearBeforeThreshold = (threshold) => {
        for (const ts of [...order]) {
            const sampleEnd = ts + byTimestamp[ts].duration;
            if (sampleEnd >= threshold) {
                continue;
            }
            if (ts === order[order.length - 1]) {
                // Never evict the newest sample.
                continue;
            }
            byTimestamp[ts].close();
            delete byTimestamp[ts];
            order.splice(order.indexOf(ts), 1);
        }
    };
    // Close and forget every cached sample.
    const deleteAll = () => {
        for (const ts of order) {
            byTimestamp[ts].close();
            delete byTimestamp[ts];
        }
        order.length = 0;
    };
    // All cached samples overlapping the half-open window
    // [timestamp, timestamp + durationInSeconds), in presentation order.
    const getSamples = (timestamp, durationInSeconds) => {
        const windowEnd = timestamp + durationInSeconds;
        const selected = [];
        for (const ts of order) {
            const sample = byTimestamp[ts];
            if (sample.timestamp + sample.duration - EPSILON <= timestamp) {
                // Ends before the window starts.
                continue;
            }
            if (sample.timestamp >= windowEnd - EPSILON) {
                // Starts at/after the window end; later samples do too.
                break;
            }
            selected.push(sample);
        }
        return selected;
    };
    // Live reference to the ordered timestamp list (callers must not mutate).
    const getOpenTimestamps = () => {
        return order;
    };
    // Earliest cached timestamp, or undefined when the cache is empty.
    const getOldestTimestamp = () => {
        return order[0];
    };
    // End time (timestamp + duration) of the newest sample, or null when empty.
    const getNewestTimestamp = () => {
        if (order.length === 0) {
            return null;
        }
        const newest = byTimestamp[order[order.length - 1]];
        return newest.timestamp + newest.duration;
    };
    return {
        addFrame,
        clearBeforeThreshold,
        deleteAll,
        getSamples,
        getOldestTimestamp,
        getNewestTimestamp,
        getOpenTimestamps,
    };
};
@@ -0,0 +1,132 @@
1
+ import { Internals } from 'remotion';
2
+ import { SAFE_BACK_WINDOW_IN_SECONDS } from '../caches';
3
+ import { makeAudioCache } from './audio-cache';
4
// https://discord.com/channels/@me/1409810025844838481/1415028953093111870
// Audio frames might have dependencies on previous and next frames so we need to decode a bit more
// and then discard it.
// The worst case seems to be FLAC files with a 65'535 sample window, which would be 1486.0ms at 44.1Khz.
// So let's set a threshold of 1.5 seconds.
const extraThreshold = 1.5;
// Sources for which the Matroska limitation warning was already emitted,
// keyed by src.
const warned = {};
// Warn at most once per source that Matroska media must be decoded from
// the beginning.
const warnAboutMatroskaOnce = (src, logLevel) => {
    if (warned[src]) {
        return;
    }
    warned[src] = true;
    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
};
18
/**
 * Creates an iterator over decoded audio samples of `src`, starting slightly
 * before `startTimestamp` (audio frames may depend on neighbors, so decoding
 * begins `extraThreshold` seconds early). Decoded samples are held in an
 * in-order cache and evicted once they fall behind the safe-back window.
 */
export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp, src, actualMatroskaTimestamps, logLevel, }) => {
    // Matroska timestamps are not accurate unless we start from the beginning
    // So for matroska, we need to decode all samples :(
    // https://github.com/Vanilagy/mediabunny/issues/105
    const sampleIterator = audioSampleSink.samples(isMatroska ? 0 : Math.max(0, startTimestamp - extraThreshold));
    if (isMatroska) {
        warnAboutMatroskaOnce(src, logLevel);
    }
    // Total media duration; only known once the sample iterator is exhausted.
    let fullDuration = null;
    const cache = makeAudioCache();
    // Timestamp of last use, for LRU eviction by the audio manager.
    let lastUsed = Date.now();
    // Decode the next sample, fix up Matroska timestamps, and cache it.
    // Returns null when the stream ended (and records the full duration).
    const getNextSample = async () => {
        lastUsed = Date.now();
        const { value: sample, done } = await sampleIterator.next();
        if (done) {
            fullDuration = cache.getNewestTimestamp();
            return null;
        }
        // Replace the container timestamp with a previously observed real one
        // if the two disagree.
        const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);
        if (realTimestamp !== null && realTimestamp !== sample.timestamp) {
            sample.setTimestamp(realTimestamp);
        }
        actualMatroskaTimestamps.observeTimestamp(sample.timestamp);
        actualMatroskaTimestamps.observeTimestamp(sample.timestamp + sample.duration);
        cache.addFrame(sample);
        return sample;
    };
    // Collect every sample overlapping [timestamp, timestamp + durationInSeconds),
    // decoding forward as needed and evicting stale samples along the way.
    const getSamples = async (timestamp, durationInSeconds) => {
        lastUsed = Date.now();
        if (fullDuration !== null && timestamp > fullDuration) {
            // Requested range is entirely past the end of the media.
            // Clear all samples before the timestamp
            // Do this in the while loop because samples might start from 0
            cache.clearBeforeThreshold(fullDuration - SAFE_BACK_WINDOW_IN_SECONDS);
            return [];
        }
        const samples = cache.getSamples(timestamp, durationInSeconds);
        const newestTimestamp = cache.getNewestTimestamp();
        if (newestTimestamp !== null) {
            if (newestTimestamp >= timestamp + durationInSeconds - 0.0000000001) {
                // The cache already covers the whole window - no decoding needed.
                return samples;
            }
        }
        while (true) {
            const sample = await getNextSample();
            // Clear all samples before the timestamp
            // Do this in the while loop because samples might start from 0
            // Also do this after a sample has just been added, if it was the last sample we now have the duration
            // and can prevent deleting the last sample
            const deleteBefore = fullDuration === null ? timestamp : Math.min(timestamp, fullDuration);
            cache.clearBeforeThreshold(deleteBefore - SAFE_BACK_WINDOW_IN_SECONDS);
            if (sample === null) {
                break;
            }
            if (sample.timestamp + sample.duration - 0.0000000001 <= timestamp) {
                // Ends before the requested window starts.
                continue;
            }
            if (sample.timestamp >= timestamp + durationInSeconds - 0.0000000001) {
                // Starts at/after the window end - done.
                break;
            }
            samples.push(sample);
        }
        return samples;
    };
    // Verbose-log the range of timestamps currently held in the cache.
    const logOpenFrames = () => {
        const openTimestamps = cache.getOpenTimestamps();
        if (openTimestamps.length > 0) {
            const first = openTimestamps[0];
            const last = openTimestamps[openTimestamps.length - 1];
            Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Open audio samples for src', src, `${first.toFixed(3)}...${last.toFixed(3)}`);
        }
    };
    const getCacheStats = () => {
        return {
            count: cache.getOpenTimestamps().length,
            // NOTE(review): `size` sums the timestamps themselves, not byte
            // sizes - confirm this is the intended cache-size metric.
            size: cache.getOpenTimestamps().reduce((acc, t) => acc + t, 0),
        };
    };
    // The iterator can serve `timestamp` if it already decoded up to it (or
    // the media ended before it) and would not have to decode more than ~10s
    // forward from its oldest cached sample.
    const canSatisfyRequestedTime = (timestamp) => {
        const oldestTimestamp = cache.getOldestTimestamp() ?? startTimestamp;
        if (fullDuration !== null && timestamp > fullDuration) {
            return true;
        }
        return (oldestTimestamp < timestamp && Math.abs(oldestTimestamp - timestamp) < 10);
    };
    // Release all cached samples and dispose of the underlying iterator,
    // closing the pending sample (if any) once the iterator returns.
    const prepareForDeletion = () => {
        cache.deleteAll();
        sampleIterator.return().then((value) => {
            if (value.value) {
                value.value.close();
            }
        });
        fullDuration = null;
    };
    // Serializes getSamples() calls so decode requests do not interleave.
    let op = Promise.resolve([]);
    return {
        src,
        getSamples: (ts, dur) => {
            op = op.then(() => getSamples(ts, dur));
            return op;
        },
        // Resolves once all currently queued sample requests are done.
        waitForCompletion: async () => {
            await op;
            return true;
        },
        canSatisfyRequestedTime,
        logOpenFrames,
        getCacheStats,
        getLastUsed: () => lastUsed,
        prepareForDeletion,
        startTimestamp,
        clearBeforeThreshold: cache.clearBeforeThreshold,
        getOldestTimestamp: cache.getOldestTimestamp,
        getNewestTimestamp: cache.getNewestTimestamp,
    };
};
@@ -0,0 +1,113 @@
1
+ import { Internals } from 'remotion';
2
+ import { getTotalCacheStats } from '../caches';
3
+ import { makeAudioIterator } from './audio-iterator';
4
/**
 * Maintains a pool of audio iterators so repeated extraction requests can
 * reuse already-decoded samples. Evicts least-recently-used iterators when
 * the combined cache exceeds the budget, and serializes `getIterator()`
 * calls so pool mutations never interleave.
 *
 * Fixes over the previous version:
 * - `deleteDuplicateIterators` iterated the live array by index while
 *   splicing it, skipping the element that shifted into the removed slot;
 *   it now walks a snapshot (same fix applied to the same-start cleanup
 *   loop in `getIterator`).
 * - The cache-budget eviction loop could spin forever once the pool was
 *   empty; eviction now reports whether anything was removed.
 * - One rejected `getIterator()` call permanently poisoned the serialization
 *   queue; the chain now continues from a handled promise while the caller
 *   still observes the rejection.
 */
export const makeAudioManager = () => {
    const iterators = [];
    // Create a fresh iterator and register it in the pool.
    const makeIterator = ({ timeInSeconds, src, audioSampleSink, isMatroska, actualMatroskaTimestamps, logLevel, }) => {
        const iterator = makeAudioIterator({
            audioSampleSink,
            isMatroska,
            startTimestamp: timeInSeconds,
            src,
            actualMatroskaTimestamps,
            logLevel,
        });
        iterators.push(iterator);
        return iterator;
    };
    // Least-recently-used iterator, or null when the pool is empty.
    const getIteratorMostInThePast = () => {
        let mostInThePast = null;
        let mostInThePastIterator = null;
        for (const iterator of iterators) {
            const lastUsed = iterator.getLastUsed();
            if (mostInThePast === null || lastUsed < mostInThePast) {
                mostInThePast = lastUsed;
                mostInThePastIterator = iterator;
            }
        }
        return mostInThePastIterator;
    };
    // Evict the LRU iterator. Returns whether anything was removed so callers
    // can stop looping once the pool is empty.
    const deleteOldestIterator = () => {
        const iterator = getIteratorMostInThePast();
        if (!iterator) {
            return false;
        }
        iterator.prepareForDeletion();
        iterators.splice(iterators.indexOf(iterator), 1);
        return true;
    };
    // Remove iterators covering the same src + timestamp range. Walk a
    // snapshot: splicing the live array during an index-based walk would skip
    // the element that shifts into the removed slot.
    const deleteDuplicateIterators = (logLevel) => {
        const seenKeys = new Set();
        for (const iterator of iterators.slice()) {
            const key = `${iterator.src}-${iterator.getOldestTimestamp()}-${iterator.getNewestTimestamp()}`;
            if (seenKeys.has(key)) {
                iterator.prepareForDeletion();
                iterators.splice(iterators.indexOf(iterator), 1);
                Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Deleted duplicate iterator for ${iterator.src}`);
                continue;
            }
            seenKeys.add(key);
        }
    };
    // Find or create an iterator able to serve `timeInSeconds` of `src`.
    const getIterator = async ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, logLevel, maxCacheSize, }) => {
        // Stay under the cache budget; bail out when nothing is left to evict.
        while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
            if (!deleteOldestIterator()) {
                break;
            }
        }
        // Reuse an existing iterator that can satisfy the requested time.
        for (const iterator of iterators) {
            if (iterator.src === src &&
                (await iterator.waitForCompletion()) &&
                iterator.canSatisfyRequestedTime(timeInSeconds)) {
                return iterator;
            }
        }
        // Delete iterators with the same starting timestamp as requested -
        // they could not satisfy the request, so the new one supersedes them.
        for (const iterator of iterators.slice()) {
            if (iterator.src === src && iterator.startTimestamp === timeInSeconds) {
                iterator.prepareForDeletion();
                iterators.splice(iterators.indexOf(iterator), 1);
            }
        }
        deleteDuplicateIterators(logLevel);
        return makeIterator({
            src,
            timeInSeconds,
            audioSampleSink,
            isMatroska,
            actualMatroskaTimestamps,
            logLevel,
        });
    };
    // Aggregate count/size over all pooled iterators.
    const getCacheStats = () => {
        let totalCount = 0;
        let totalSize = 0;
        for (const iterator of iterators) {
            const { count, size } = iterator.getCacheStats();
            totalCount += count;
            totalSize += size;
        }
        return { count: totalCount, totalSize };
    };
    const logOpenFrames = () => {
        for (const iterator of iterators) {
            iterator.logOpenFrames();
        }
    };
    // Serializes getIterator() calls; a failure must not poison the chain.
    let queue = Promise.resolve(undefined);
    return {
        getIterator: (params) => {
            const result = queue.then(() => getIterator(params));
            // Continue the chain from a settled promise, while the caller
            // still sees the rejection on `result`.
            queue = result.catch(() => undefined);
            return result;
        },
        getCacheStats,
        getIteratorMostInThePast,
        logOpenFrames,
        deleteDuplicateIterators,
    };
};
@@ -0,0 +1,132 @@
1
+ import { audioManager } from '../caches';
2
+ import { combineAudioDataAndClosePrevious } from '../convert-audiodata/combine-audiodata';
3
+ import { convertAudioData, fixFloatingPoint, } from '../convert-audiodata/convert-audiodata';
4
+ import { TARGET_NUMBER_OF_CHANNELS, TARGET_SAMPLE_RATE, } from '../convert-audiodata/resample-audiodata';
5
+ import { getSink } from '../get-sink';
6
+ import { getTimeInSeconds } from '../get-time-in-seconds';
7
+ import { isNetworkError, isUnsupportedConfigurationError, } from '../is-type-of-error';
8
/**
 * Extracts the audio covering one rendered frame's time range from `src`.
 *
 * Returns one of:
 * - `{data, durationInSeconds}` - combined audio data (or `data: null` when
 *   there is no audio track or no overlapping samples),
 * - the strings 'network-error' | 'cannot-decode' | 'unknown-container-format'
 *   for expected failure modes.
 * Unexpected errors are rethrown.
 */
const extractAudioInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds, durationInSeconds: durationNotYetApplyingPlaybackRate, logLevel, loop, playbackRate, audioStreamIndex, trimBefore, trimAfter, fps, maxCacheSize, }) => {
    const { getAudio, actualMatroskaTimestamps, isMatroska, getDuration } = await getSink(src, logLevel);
    let mediaDurationInSeconds = null;
    if (loop) {
        // The media duration is only needed to wrap the looped time.
        mediaDurationInSeconds = await getDuration();
    }
    const audio = await getAudio(audioStreamIndex);
    if (audio === 'network-error') {
        return 'network-error';
    }
    if (audio === 'no-audio-track') {
        return { data: null, durationInSeconds: null };
    }
    if (audio === 'cannot-decode-audio') {
        return 'cannot-decode';
    }
    if (audio === 'unknown-container-format') {
        return 'unknown-container-format';
    }
    // Map the unlooped composition time to a media time, applying looping,
    // trims and playback rate.
    const timeInSeconds = getTimeInSeconds({
        loop,
        mediaDurationInSeconds,
        unloopedTimeInSeconds,
        src,
        trimAfter,
        playbackRate,
        trimBefore,
        fps,
        ifNoMediaDuration: 'fail',
    });
    if (timeInSeconds === null) {
        // The requested time falls outside the playable range.
        return { data: null, durationInSeconds: mediaDurationInSeconds };
    }
    try {
        const sampleIterator = await audioManager.getIterator({
            src,
            timeInSeconds,
            audioSampleSink: audio.sampleSink,
            isMatroska,
            actualMatroskaTimestamps,
            logLevel,
            maxCacheSize,
        });
        // A faster playback rate consumes more media time per output frame.
        const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;
        const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
        audioManager.logOpenFrames();
        const audioDataArray = [];
        for (let i = 0; i < samples.length; i++) {
            const sample = samples[i];
            // Less than 1 sample would be included - we did not need it after all!
            if (Math.abs(sample.timestamp - (timeInSeconds + durationInSeconds)) *
                sample.sampleRate <
                1) {
                continue;
            }
            // Less than 1 sample would be included - we did not need it after all!
            if (sample.timestamp + sample.duration <= timeInSeconds) {
                continue;
            }
            const isFirstSample = i === 0;
            const isLastSample = i === samples.length - 1;
            const audioDataRaw = sample.toAudioData();
            // amount of samples to shave from start and end
            let trimStartInSeconds = 0;
            let trimEndInSeconds = 0;
            let leadingSilence = null;
            if (isFirstSample) {
                trimStartInSeconds = fixFloatingPoint(timeInSeconds - sample.timestamp);
                if (trimStartInSeconds < 0) {
                    // The first sample starts after the requested time:
                    // pad the gap with silence instead of trimming.
                    const silenceFrames = Math.ceil(fixFloatingPoint(-trimStartInSeconds * TARGET_SAMPLE_RATE));
                    leadingSilence = {
                        data: new Int16Array(silenceFrames * TARGET_NUMBER_OF_CHANNELS),
                        numberOfFrames: silenceFrames,
                        timestamp: timeInSeconds * 1000000,
                        durationInMicroSeconds: (silenceFrames / TARGET_SAMPLE_RATE) * 1000000,
                    };
                    trimStartInSeconds = 0;
                }
            }
            if (isLastSample) {
                trimEndInSeconds =
                    // clamp to 0 in case the audio ends early
                    Math.max(0, sample.timestamp +
                        sample.duration -
                        (timeInSeconds + durationInSeconds));
            }
            const audioData = convertAudioData({
                audioData: audioDataRaw,
                trimStartInSeconds,
                trimEndInSeconds,
                playbackRate,
                audioDataTimestamp: sample.timestamp,
                isLast: isLastSample,
            });
            // The raw AudioData is no longer needed after conversion.
            audioDataRaw.close();
            if (audioData.numberOfFrames === 0) {
                continue;
            }
            if (leadingSilence) {
                audioDataArray.push(leadingSilence);
            }
            audioDataArray.push(audioData);
        }
        if (audioDataArray.length === 0) {
            return { data: null, durationInSeconds: mediaDurationInSeconds };
        }
        const combined = combineAudioDataAndClosePrevious(audioDataArray);
        return { data: combined, durationInSeconds: mediaDurationInSeconds };
    }
    catch (err) {
        const error = err;
        // Translate known failure modes into sentinel strings; anything else
        // is unexpected and rethrown.
        if (isNetworkError(error)) {
            return 'network-error';
        }
        if (isUnsupportedConfigurationError(error)) {
            return 'cannot-decode';
        }
        throw err;
    }
};
128
// Serializes extractAudio() calls so extractions run strictly one at a time.
let queue = Promise.resolve(undefined);
/**
 * Queued entry point for audio extraction (see `extractAudioInternal`).
 *
 * Each call runs after all previously queued extractions. A rejected
 * extraction is surfaced to its own caller but no longer poisons the queue:
 * previously, a single rejection caused every subsequent call to reject with
 * the stale error, because the chain continued from the rejected promise.
 */
export const extractAudio = (params) => {
    const result = queue.then(() => extractAudioInternal(params));
    // Continue the chain from a settled promise; `result` still carries the
    // rejection to this call's caller.
    queue = result.catch(() => undefined);
    return result;
};