@remotion/studio 4.0.452 → 4.0.454

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/audio-waveform-worker.d.ts +1 -0
  2. package/dist/audio-waveform-worker.js +102 -0
  3. package/dist/components/AudioWaveform.d.ts +2 -0
  4. package/dist/components/AudioWaveform.js +168 -18
  5. package/dist/components/CurrentAsset.js +13 -5
  6. package/dist/components/Timeline/LoopedIndicator.js +5 -19
  7. package/dist/components/Timeline/TimelineSequence.js +18 -10
  8. package/dist/components/Timeline/TimelineVideoInfo.d.ts +2 -0
  9. package/dist/components/Timeline/TimelineVideoInfo.js +51 -12
  10. package/dist/components/audio-waveform-worker-types.d.ts +28 -0
  11. package/dist/components/audio-waveform-worker-types.js +2 -0
  12. package/dist/components/draw-peaks.d.ts +1 -1
  13. package/dist/components/load-waveform-peaks.d.ts +11 -1
  14. package/dist/components/load-waveform-peaks.js +33 -36
  15. package/dist/components/looped-media-timeline.d.ts +6 -0
  16. package/dist/components/looped-media-timeline.js +14 -0
  17. package/dist/components/slice-waveform-peaks.d.ts +7 -0
  18. package/dist/components/slice-waveform-peaks.js +15 -0
  19. package/dist/components/waveform-peak-processor.d.ts +23 -0
  20. package/dist/components/waveform-peak-processor.js +77 -0
  21. package/dist/esm/audio-waveform-worker.mjs +351 -0
  22. package/dist/esm/{chunk-hxr6txpe.js → chunk-g39hwn0a.js} +434 -108
  23. package/dist/esm/internals.mjs +434 -108
  24. package/dist/esm/previewEntry.mjs +434 -108
  25. package/dist/esm/renderEntry.mjs +1 -1
  26. package/dist/helpers/calculate-timeline.js +16 -0
  27. package/dist/helpers/extract-frames.js +12 -3
  28. package/dist/helpers/get-duration-or-compute.d.ts +2 -0
  29. package/dist/helpers/get-duration-or-compute.js +10 -0
  30. package/dist/helpers/get-timeline-nestedness.js +2 -1
  31. package/dist/helpers/use-max-media-duration.js +2 -2
  32. package/dist/make-audio-waveform-worker.d.ts +1 -0
  33. package/dist/make-audio-waveform-worker.js +10 -0
  34. package/package.json +19 -10
@@ -9,6 +9,7 @@ const frame_database_1 = require("../../helpers/frame-database");
9
9
  const resize_video_frame_1 = require("../../helpers/resize-video-frame");
10
10
  const timeline_layout_1 = require("../../helpers/timeline-layout");
11
11
  const AudioWaveform_1 = require("../AudioWaveform");
12
+ const looped_media_timeline_1 = require("../looped-media-timeline");
12
13
  const FILMSTRIP_HEIGHT = timeline_layout_1.TIMELINE_LAYER_HEIGHT_IMAGE - 2;
13
14
  const outerStyle = {
14
15
  width: '100%',
@@ -146,7 +147,7 @@ const fillFrameWhereItFits = ({ frame, filledSlots, ctx, visualizationWidth, seg
146
147
  });
147
148
  }
148
149
  };
149
- const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore, durationInFrames, playbackRate, volume, doesVolumeChange, premountWidth, postmountWidth, }) => {
150
+ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore, durationInFrames, playbackRate, volume, doesVolumeChange, premountWidth, postmountWidth, loopDisplay, }) => {
150
151
  const { fps } = (0, remotion_1.useVideoConfig)();
151
152
  const ref = (0, react_1.useRef)(null);
152
153
  const [error, setError] = (0, react_1.useState)(null);
@@ -169,28 +170,63 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
169
170
  return;
170
171
  }
171
172
  current.appendChild(canvas);
173
+ const loopWidth = (0, looped_media_timeline_1.getLoopDisplayWidth)({
174
+ visualizationWidth: naturalWidth,
175
+ loopDisplay,
176
+ });
177
+ const shouldRepeatVideo = (0, looped_media_timeline_1.shouldTileLoopDisplay)(loopDisplay);
178
+ const targetCanvas = shouldRepeatVideo
179
+ ? document.createElement('canvas')
180
+ : canvas;
181
+ targetCanvas.width = shouldRepeatVideo
182
+ ? Math.max(1, Math.ceil(loopWidth))
183
+ : canvas.width;
184
+ targetCanvas.height = canvas.height;
185
+ const targetCtx = shouldRepeatVideo ? targetCanvas.getContext('2d') : ctx;
186
+ if (!targetCtx) {
187
+ current.removeChild(canvas);
188
+ return;
189
+ }
190
+ const repeatTarget = () => {
191
+ if (!shouldRepeatVideo) {
192
+ return;
193
+ }
194
+ const pattern = ctx.createPattern(targetCanvas, 'repeat-x');
195
+ if (!pattern) {
196
+ return;
197
+ }
198
+ pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / targetCanvas.width, 1));
199
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
200
+ ctx.fillStyle = pattern;
201
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
202
+ };
172
203
  // desired-timestamp -> filled-timestamp
173
204
  const filledSlots = new Map();
174
205
  const fromSeconds = trimBefore / fps;
206
+ const visibleDurationInFrames = shouldRepeatVideo && loopDisplay
207
+ ? loopDisplay.durationInFrames
208
+ : durationInFrames;
175
209
  // Trim is applied first, then playbackRate. Each composition frame
176
210
  // advances the source video by `playbackRate` source frames.
177
- const toSeconds = fromSeconds + (durationInFrames * playbackRate) / fps;
211
+ const toSeconds = fromSeconds + (visibleDurationInFrames * playbackRate) / fps;
212
+ const targetWidth = shouldRepeatVideo ? targetCanvas.width : naturalWidth;
178
213
  if (aspectRatio.current !== null) {
179
214
  ensureSlots({
180
215
  filledSlots,
181
- naturalWidth,
216
+ naturalWidth: targetWidth,
182
217
  fromSeconds,
183
218
  toSeconds,
184
219
  aspectRatio: aspectRatio.current,
185
220
  });
186
221
  fillWithCachedFrames({
187
- ctx,
188
- naturalWidth,
222
+ ctx: targetCtx,
223
+ naturalWidth: targetWidth,
189
224
  filledSlots,
190
225
  src,
191
226
  segmentDuration: toSeconds - fromSeconds,
192
227
  fromSeconds,
193
228
  });
229
+ repeatTarget();
194
230
  const unfilled = Array.from(filledSlots.keys()).filter((timestamp) => !filledSlots.get(timestamp));
195
231
  // Don't extract frames if all slots are filled
196
232
  if (unfilled.length === 0) {
@@ -207,7 +243,7 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
207
243
  filledSlots,
208
244
  fromSeconds,
209
245
  toSeconds,
210
- naturalWidth,
246
+ naturalWidth: targetWidth,
211
247
  aspectRatio: aspectRatio.current,
212
248
  });
213
249
  return Array.from(filledSlots.keys()).map((timestamp) => timestamp / WEBCODECS_TIMESCALE);
@@ -235,17 +271,18 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
235
271
  filledSlots,
236
272
  fromSeconds,
237
273
  toSeconds,
238
- naturalWidth,
274
+ naturalWidth: targetWidth,
239
275
  aspectRatio: aspectRatio.current,
240
276
  });
241
277
  fillFrameWhereItFits({
242
- ctx,
278
+ ctx: targetCtx,
243
279
  filledSlots,
244
- visualizationWidth: naturalWidth,
280
+ visualizationWidth: targetWidth,
245
281
  frame: transformed,
246
282
  segmentDuration: toSeconds - fromSeconds,
247
283
  fromSeconds,
248
284
  });
285
+ repeatTarget();
249
286
  }
250
287
  catch (e) {
251
288
  if (frame) {
@@ -264,13 +301,14 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
264
301
  return;
265
302
  }
266
303
  fillWithCachedFrames({
267
- ctx,
268
- naturalWidth,
304
+ ctx: targetCtx,
305
+ naturalWidth: targetWidth,
269
306
  filledSlots,
270
307
  src,
271
308
  segmentDuration: toSeconds - fromSeconds,
272
309
  fromSeconds,
273
310
  });
311
+ repeatTarget();
274
312
  })
275
313
  .catch((e) => {
276
314
  setError(e);
@@ -283,6 +321,7 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
283
321
  durationInFrames,
284
322
  error,
285
323
  fps,
324
+ loopDisplay,
286
325
  naturalWidth,
287
326
  playbackRate,
288
327
  src,
@@ -299,7 +338,7 @@ const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore,
299
338
  };
300
339
  }, [audioWidth, premountWidth]);
301
340
  return (jsx_runtime_1.jsxs("div", { style: outerStyle, children: [
302
- jsx_runtime_1.jsx("div", { ref: ref, style: filmstripContainerStyle }), jsx_runtime_1.jsx("div", { style: audioStyle, children: jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: src, visualizationWidth: audioWidth, startFrom: trimBefore, durationInFrames: durationInFrames, volume: volume, doesVolumeChange: doesVolumeChange, playbackRate: playbackRate }) })
341
+ jsx_runtime_1.jsx("div", { ref: ref, style: filmstripContainerStyle }), jsx_runtime_1.jsx("div", { style: audioStyle, children: jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: src, visualizationWidth: audioWidth, startFrom: trimBefore, durationInFrames: durationInFrames, volume: volume, doesVolumeChange: doesVolumeChange, playbackRate: playbackRate, loopDisplay: loopDisplay }) })
303
342
  ] }));
304
343
  };
305
344
  exports.TimelineVideoInfo = TimelineVideoInfo;
@@ -0,0 +1,28 @@
1
+ import type { LoopDisplay } from 'remotion';
2
+ export type AudioWaveformWorkerInitMessage = {
3
+ readonly type: 'init';
4
+ readonly canvas: OffscreenCanvas;
5
+ };
6
+ export type AudioWaveformWorkerRenderMessage = {
7
+ readonly type: 'render';
8
+ readonly requestId: number;
9
+ readonly src: string;
10
+ readonly width: number;
11
+ readonly height: number;
12
+ readonly volume: number;
13
+ readonly startFrom: number;
14
+ readonly durationInFrames: number;
15
+ readonly fps: number;
16
+ readonly playbackRate: number;
17
+ readonly loopDisplay: LoopDisplay | undefined;
18
+ };
19
+ export type AudioWaveformWorkerDisposeMessage = {
20
+ readonly type: 'dispose';
21
+ };
22
+ export type AudioWaveformWorkerIncomingMessage = AudioWaveformWorkerInitMessage | AudioWaveformWorkerRenderMessage | AudioWaveformWorkerDisposeMessage;
23
+ export type AudioWaveformWorkerErrorMessage = {
24
+ readonly type: 'error';
25
+ readonly requestId: number;
26
+ readonly message: string;
27
+ };
28
+ export type AudioWaveformWorkerOutgoingMessage = AudioWaveformWorkerErrorMessage;
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -1 +1 @@
1
- export declare const drawBars: (canvas: HTMLCanvasElement, peaks: Float32Array<ArrayBufferLike>, color: string, volume: number, width: number) => void;
1
+ export declare const drawBars: (canvas: HTMLCanvasElement | OffscreenCanvas, peaks: Float32Array<ArrayBufferLike>, color: string, volume: number, width: number) => void;
@@ -1,3 +1,13 @@
1
1
  declare const TARGET_SAMPLE_RATE = 100;
2
2
  export { TARGET_SAMPLE_RATE };
3
- export declare function loadWaveformPeaks(url: string, signal: AbortSignal): Promise<Float32Array>;
3
+ type Progress = {
4
+ readonly peaks: Float32Array;
5
+ readonly completedPeaks: number;
6
+ readonly totalPeaks: number;
7
+ readonly final: boolean;
8
+ };
9
+ type LoadWaveformPeaksOptions = {
10
+ readonly onProgress?: (progress: Progress) => void;
11
+ readonly progressIntervalInMs?: number;
12
+ };
13
+ export declare function loadWaveformPeaks(url: string, signal: AbortSignal, options?: LoadWaveformPeaksOptions): Promise<Float32Array>;
@@ -3,14 +3,24 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.TARGET_SAMPLE_RATE = void 0;
4
4
  exports.loadWaveformPeaks = loadWaveformPeaks;
5
5
  const mediabunny_1 = require("mediabunny");
6
+ const waveform_peak_processor_1 = require("./waveform-peak-processor");
6
7
  const TARGET_SAMPLE_RATE = 100;
7
8
  exports.TARGET_SAMPLE_RATE = TARGET_SAMPLE_RATE;
9
+ const DEFAULT_PROGRESS_INTERVAL_IN_MS = 50;
8
10
  const peaksCache = new Map();
9
- async function loadWaveformPeaks(url, signal) {
10
- var _a;
11
+ async function loadWaveformPeaks(url, signal, options) {
12
+ var _a, _b;
11
13
  const cached = peaksCache.get(url);
12
- if (cached)
14
+ if (cached) {
15
+ (0, waveform_peak_processor_1.emitWaveformProgress)({
16
+ peaks: cached,
17
+ completedPeaks: cached.length,
18
+ totalPeaks: cached.length,
19
+ final: true,
20
+ onProgress: options === null || options === void 0 ? void 0 : options.onProgress,
21
+ });
13
22
  return cached;
23
+ }
14
24
  const input = new mediabunny_1.Input({
15
25
  formats: mediabunny_1.ALL_FORMATS,
16
26
  source: new mediabunny_1.UrlSource(url),
@@ -20,15 +30,26 @@ async function loadWaveformPeaks(url, signal) {
20
30
  if (!audioTrack) {
21
31
  return new Float32Array(0);
22
32
  }
23
- const { sampleRate } = audioTrack;
24
- const durationInSeconds = await audioTrack.computeDuration();
33
+ if (await audioTrack.isLive()) {
34
+ throw new Error('Live streams are not currently supported by Remotion. Sorry! Source: ' +
35
+ url);
36
+ }
37
+ if (await audioTrack.isRelativeToUnixEpoch()) {
38
+ throw new Error('Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
39
+ url);
40
+ }
41
+ const sampleRate = await audioTrack.getSampleRate();
42
+ const durationInSeconds = (_a = (await audioTrack.getDurationFromMetadata({ skipLiveWait: true }))) !== null && _a !== void 0 ? _a : (await audioTrack.computeDuration({ skipLiveWait: true }));
25
43
  const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
26
44
  const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
27
- const peaks = new Float32Array(totalPeaks);
28
- let peakIndex = 0;
29
- let peakMax = 0;
30
- let sampleInPeak = 0;
31
45
  const sink = new mediabunny_1.AudioSampleSink(audioTrack);
46
+ const processor = (0, waveform_peak_processor_1.createWaveformPeakProcessor)({
47
+ totalPeaks,
48
+ samplesPerPeak,
49
+ onProgress: options === null || options === void 0 ? void 0 : options.onProgress,
50
+ progressIntervalInMs: (_b = options === null || options === void 0 ? void 0 : options.progressIntervalInMs) !== null && _b !== void 0 ? _b : DEFAULT_PROGRESS_INTERVAL_IN_MS,
51
+ now: () => Date.now(),
52
+ });
32
53
  for await (const sample of sink.samples()) {
33
54
  if (signal.aborted) {
34
55
  sample.close();
@@ -41,35 +62,11 @@ async function loadWaveformPeaks(url, signal) {
41
62
  const floats = new Float32Array(bytesNeeded / 4);
42
63
  sample.copyTo(floats, { format: 'f32', planeIndex: 0 });
43
64
  const channels = Math.max(1, sample.numberOfChannels);
44
- const frames = sample.numberOfFrames;
45
65
  sample.close();
46
- for (let frame = 0; frame < frames; frame++) {
47
- // `f32` copies are interleaved, so timing must advance per frame, not per float.
48
- let framePeak = 0;
49
- for (let channel = 0; channel < channels; channel++) {
50
- const sampleIndex = frame * channels + channel;
51
- const abs = Math.abs((_a = floats[sampleIndex]) !== null && _a !== void 0 ? _a : 0);
52
- if (abs > framePeak) {
53
- framePeak = abs;
54
- }
55
- }
56
- if (framePeak > peakMax) {
57
- peakMax = framePeak;
58
- }
59
- sampleInPeak++;
60
- if (sampleInPeak >= samplesPerPeak) {
61
- if (peakIndex < totalPeaks) {
62
- peaks[peakIndex] = peakMax;
63
- }
64
- peakIndex++;
65
- peakMax = 0;
66
- sampleInPeak = 0;
67
- }
68
- }
69
- }
70
- if (sampleInPeak > 0 && peakIndex < totalPeaks) {
71
- peaks[peakIndex] = peakMax;
66
+ processor.processSampleChunk(floats, channels);
72
67
  }
68
+ processor.finalize();
69
+ const { peaks } = processor;
73
70
  peaksCache.set(url, peaks);
74
71
  return peaks;
75
72
  }
@@ -0,0 +1,6 @@
1
+ import type { LoopDisplay } from 'remotion';
2
+ export declare const shouldTileLoopDisplay: (loopDisplay: LoopDisplay | undefined) => loopDisplay is LoopDisplay;
3
+ export declare const getLoopDisplayWidth: ({ visualizationWidth, loopDisplay, }: {
4
+ visualizationWidth: number;
5
+ loopDisplay: LoopDisplay | undefined;
6
+ }) => number;
@@ -0,0 +1,14 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.getLoopDisplayWidth = exports.shouldTileLoopDisplay = void 0;
4
+ const shouldTileLoopDisplay = (loopDisplay) => {
5
+ return loopDisplay !== undefined && loopDisplay.numberOfTimes > 1;
6
+ };
7
+ exports.shouldTileLoopDisplay = shouldTileLoopDisplay;
8
+ const getLoopDisplayWidth = ({ visualizationWidth, loopDisplay, }) => {
9
+ if (!(0, exports.shouldTileLoopDisplay)(loopDisplay)) {
10
+ return visualizationWidth;
11
+ }
12
+ return visualizationWidth / loopDisplay.numberOfTimes;
13
+ };
14
+ exports.getLoopDisplayWidth = getLoopDisplayWidth;
@@ -0,0 +1,7 @@
1
+ export declare const sliceWaveformPeaks: ({ durationInFrames, fps, peaks, playbackRate, startFrom, }: {
2
+ readonly peaks: Float32Array<ArrayBufferLike>;
3
+ readonly startFrom: number;
4
+ readonly durationInFrames: number;
5
+ readonly fps: number;
6
+ readonly playbackRate: number;
7
+ }) => Float32Array<ArrayBufferLike>;
@@ -0,0 +1,15 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.sliceWaveformPeaks = void 0;
4
+ const load_waveform_peaks_1 = require("./load-waveform-peaks");
5
+ const sliceWaveformPeaks = ({ durationInFrames, fps, peaks, playbackRate, startFrom, }) => {
6
+ if (peaks.length === 0) {
7
+ return peaks;
8
+ }
9
+ const startTimeInSeconds = startFrom / fps;
10
+ const durationInSeconds = (durationInFrames / fps) * playbackRate;
11
+ const startPeakIndex = Math.floor(startTimeInSeconds * load_waveform_peaks_1.TARGET_SAMPLE_RATE);
12
+ const endPeakIndex = Math.ceil((startTimeInSeconds + durationInSeconds) * load_waveform_peaks_1.TARGET_SAMPLE_RATE);
13
+ return peaks.subarray(Math.max(0, startPeakIndex), Math.min(peaks.length, endPeakIndex));
14
+ };
15
+ exports.sliceWaveformPeaks = sliceWaveformPeaks;
@@ -0,0 +1,23 @@
1
+ type Progress = {
2
+ readonly peaks: Float32Array;
3
+ readonly completedPeaks: number;
4
+ readonly totalPeaks: number;
5
+ readonly final: boolean;
6
+ };
7
+ type WaveformPeakProcessorOptions = {
8
+ readonly totalPeaks: number;
9
+ readonly samplesPerPeak: number;
10
+ readonly onProgress?: (progress: Progress) => void;
11
+ readonly progressIntervalInMs: number;
12
+ readonly now: () => number;
13
+ };
14
+ type WaveformPeakProcessor = {
15
+ readonly peaks: Float32Array;
16
+ processSampleChunk: (floats: Float32Array, channels: number) => void;
17
+ finalize: () => void;
18
+ };
19
+ export declare const emitWaveformProgress: ({ completedPeaks, final, onProgress, peaks, totalPeaks, }: Progress & {
20
+ readonly onProgress?: ((progress: Progress) => void) | undefined;
21
+ }) => void;
22
+ export declare const createWaveformPeakProcessor: ({ totalPeaks, samplesPerPeak, onProgress, progressIntervalInMs, now, }: WaveformPeakProcessorOptions) => WaveformPeakProcessor;
23
+ export {};
@@ -0,0 +1,77 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.createWaveformPeakProcessor = exports.emitWaveformProgress = void 0;
4
+ const emitWaveformProgress = ({ completedPeaks, final, onProgress, peaks, totalPeaks, }) => {
5
+ onProgress === null || onProgress === void 0 ? void 0 : onProgress({
6
+ peaks,
7
+ completedPeaks,
8
+ totalPeaks,
9
+ final,
10
+ });
11
+ };
12
+ exports.emitWaveformProgress = emitWaveformProgress;
13
+ const createWaveformPeakProcessor = ({ totalPeaks, samplesPerPeak, onProgress, progressIntervalInMs, now, }) => {
14
+ const peaks = new Float32Array(totalPeaks);
15
+ let peakIndex = 0;
16
+ let peakMax = 0;
17
+ let sampleInPeak = 0;
18
+ let lastProgressAt = 0;
19
+ let lastProgressPeak = 0;
20
+ const emitProgress = (force) => {
21
+ const timestamp = now();
22
+ if (!force && peakIndex === lastProgressPeak && sampleInPeak === 0) {
23
+ return;
24
+ }
25
+ if (!force && timestamp - lastProgressAt < progressIntervalInMs) {
26
+ return;
27
+ }
28
+ lastProgressAt = timestamp;
29
+ lastProgressPeak = peakIndex;
30
+ (0, exports.emitWaveformProgress)({
31
+ peaks,
32
+ completedPeaks: peakIndex,
33
+ totalPeaks,
34
+ final: force,
35
+ onProgress,
36
+ });
37
+ };
38
+ return {
39
+ peaks,
40
+ processSampleChunk: (floats, channels) => {
41
+ var _a;
42
+ const frameCount = Math.floor(floats.length / Math.max(1, channels));
43
+ for (let frame = 0; frame < frameCount; frame++) {
44
+ // `f32` copies are interleaved, so timing advances per frame.
45
+ let framePeak = 0;
46
+ for (let channel = 0; channel < channels; channel++) {
47
+ const sampleIndex = frame * channels + channel;
48
+ const abs = Math.abs((_a = floats[sampleIndex]) !== null && _a !== void 0 ? _a : 0);
49
+ if (abs > framePeak) {
50
+ framePeak = abs;
51
+ }
52
+ }
53
+ if (framePeak > peakMax) {
54
+ peakMax = framePeak;
55
+ }
56
+ sampleInPeak++;
57
+ if (sampleInPeak >= samplesPerPeak) {
58
+ if (peakIndex < totalPeaks) {
59
+ peaks[peakIndex] = peakMax;
60
+ }
61
+ peakIndex++;
62
+ peakMax = 0;
63
+ sampleInPeak = 0;
64
+ }
65
+ }
66
+ emitProgress(false);
67
+ },
68
+ finalize: () => {
69
+ if (sampleInPeak > 0 && peakIndex < totalPeaks) {
70
+ peaks[peakIndex] = peakMax;
71
+ peakIndex++;
72
+ }
73
+ emitProgress(true);
74
+ },
75
+ };
76
+ };
77
+ exports.createWaveformPeakProcessor = createWaveformPeakProcessor;