@remotion/studio 4.0.452 → 4.0.454

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/audio-waveform-worker.d.ts +1 -0
  2. package/dist/audio-waveform-worker.js +102 -0
  3. package/dist/components/AudioWaveform.d.ts +2 -0
  4. package/dist/components/AudioWaveform.js +168 -18
  5. package/dist/components/CurrentAsset.js +13 -5
  6. package/dist/components/Timeline/LoopedIndicator.js +5 -19
  7. package/dist/components/Timeline/TimelineSequence.js +18 -10
  8. package/dist/components/Timeline/TimelineVideoInfo.d.ts +2 -0
  9. package/dist/components/Timeline/TimelineVideoInfo.js +51 -12
  10. package/dist/components/audio-waveform-worker-types.d.ts +28 -0
  11. package/dist/components/audio-waveform-worker-types.js +2 -0
  12. package/dist/components/draw-peaks.d.ts +1 -1
  13. package/dist/components/load-waveform-peaks.d.ts +11 -1
  14. package/dist/components/load-waveform-peaks.js +33 -36
  15. package/dist/components/looped-media-timeline.d.ts +6 -0
  16. package/dist/components/looped-media-timeline.js +14 -0
  17. package/dist/components/slice-waveform-peaks.d.ts +7 -0
  18. package/dist/components/slice-waveform-peaks.js +15 -0
  19. package/dist/components/waveform-peak-processor.d.ts +23 -0
  20. package/dist/components/waveform-peak-processor.js +77 -0
  21. package/dist/esm/audio-waveform-worker.mjs +351 -0
  22. package/dist/esm/{chunk-hxr6txpe.js → chunk-g39hwn0a.js} +434 -108
  23. package/dist/esm/internals.mjs +434 -108
  24. package/dist/esm/previewEntry.mjs +434 -108
  25. package/dist/esm/renderEntry.mjs +1 -1
  26. package/dist/helpers/calculate-timeline.js +16 -0
  27. package/dist/helpers/extract-frames.js +12 -3
  28. package/dist/helpers/get-duration-or-compute.d.ts +2 -0
  29. package/dist/helpers/get-duration-or-compute.js +10 -0
  30. package/dist/helpers/get-timeline-nestedness.js +2 -1
  31. package/dist/helpers/use-max-media-duration.js +2 -2
  32. package/dist/make-audio-waveform-worker.d.ts +1 -0
  33. package/dist/make-audio-waveform-worker.js +10 -0
  34. package/package.json +19 -10
@@ -0,0 +1,351 @@
1
// src/components/parse-color.ts
// Memoized CSS-color string → [r, g, b, a] tuple (each 0-255).
// Parsing is delegated to the canvas engine: paint one pixel with the color
// and read it back. Invalid color strings leave the context's default
// fillStyle in place and therefore resolve to opaque black — same as before.
var colorCache = new Map;
var parseColor = (color) => {
  const cached = colorCache.get(color);
  if (cached) {
    return cached;
  }
  const ctx = new OffscreenCanvas(1, 1).getContext("2d");
  // getContext() may return null (unsupported context); fail with a clear
  // message instead of an opaque TypeError on the next line. Matches the
  // error text used by drawBars.
  if (!ctx) {
    throw new Error("Failed to get canvas context");
  }
  ctx.fillStyle = color;
  ctx.fillRect(0, 0, 1, 1);
  const [r, g, b, a] = ctx.getImageData(0, 0, 1, 1).data;
  const result = [r, g, b, a];
  colorCache.set(color, result);
  return result;
};
15
+
16
// src/components/draw-peaks.ts
// Color for the top/bottom fringe of bars whose volume-scaled peak exceeds 1.0.
var CLIPPING_COLOR = "#FF7F50";
// Paints a symmetric waveform bar chart onto `canvas` by writing raw ImageData
// (one 1px-wide bar per horizontal pixel).
// - peaks: normalized peak values; values above 1 after volume scaling are
//   rendered as clipping.
// - volume: linear multiplier; 0 clears the canvas and draws nothing.
// - width: number of bars / sampling resolution into `peaks`; presumably equals
//   canvas.width in the common case — bars past the canvas edge are skipped.
var drawBars = (canvas, peaks, color, volume, width) => {
  const ctx = canvas.getContext("2d");
  if (!ctx) {
    throw new Error("Failed to get canvas context");
  }
  const { height } = canvas;
  const w = canvas.width;
  ctx.clearRect(0, 0, w, height);
  if (volume === 0)
    return;
  const [r, g, b, a] = parseColor(color);
  const [cr, cg, cb, ca] = parseColor(CLIPPING_COLOR);
  // Single ImageData write instead of one fillRect per bar.
  const imageData = ctx.createImageData(w, height);
  const { data } = imageData;
  const numBars = width;
  for (let barIndex = 0;barIndex < numBars; barIndex++) {
    const x = barIndex;
    if (x >= w)
      break;
    // Nearest-sample lookup: map the bar position proportionally into `peaks`.
    const peakIndex = Math.floor(barIndex / numBars * peaks.length);
    const peak = peaks[peakIndex] || 0;
    const scaledPeak = peak * volume;
    // Bars extend symmetrically from the vertical midline; clamp to half height.
    const halfBar = Math.max(0, Math.min(height / 2, scaledPeak * height / 2));
    if (halfBar === 0)
      continue;
    const mid = height / 2;
    const barY = Math.round(mid - halfBar);
    const barEnd = Math.round(mid + halfBar);
    const isClipping = scaledPeak > 1;
    // Clipping bars get a 2px cap in CLIPPING_COLOR at both ends; the Math.min/
    // Math.max keep the caps inside very short bars.
    const clipTopEnd = isClipping ? Math.min(barY + 2, barEnd) : barY;
    const clipBotStart = isClipping ? Math.max(barEnd - 2, barY) : barEnd;
    // Top clipping cap (empty range when not clipping).
    for (let y = barY;y < clipTopEnd; y++) {
      const idx = (y * w + x) * 4;
      data[idx] = cr;
      data[idx + 1] = cg;
      data[idx + 2] = cb;
      data[idx + 3] = ca;
    }
    // Main bar body in the requested color.
    for (let y = clipTopEnd;y < clipBotStart; y++) {
      const idx = (y * w + x) * 4;
      data[idx] = r;
      data[idx + 1] = g;
      data[idx + 2] = b;
      data[idx + 3] = a;
    }
    // Bottom clipping cap (empty range when not clipping).
    for (let y = clipBotStart;y < barEnd; y++) {
      const idx = (y * w + x) * 4;
      data[idx] = cr;
      data[idx + 1] = cg;
      data[idx + 2] = cb;
      data[idx + 3] = ca;
    }
  }
  ctx.putImageData(imageData, 0, 0);
};
73
+
74
+ // src/components/load-waveform-peaks.ts
75
+ import { ALL_FORMATS, AudioSampleSink, Input, UrlSource } from "mediabunny";
76
+
77
+ // src/components/waveform-peak-processor.ts
78
+ var emitWaveformProgress = ({
79
+ completedPeaks,
80
+ final,
81
+ onProgress,
82
+ peaks,
83
+ totalPeaks
84
+ }) => {
85
+ onProgress?.({
86
+ peaks,
87
+ completedPeaks,
88
+ totalPeaks,
89
+ final
90
+ });
91
+ };
92
// Incrementally folds interleaved PCM chunks into a fixed-size Float32Array of
// per-window peaks (max |sample| across all channels of `samplesPerPeak`
// consecutive frames), emitting throttled progress callbacks along the way.
// `now` supplies timestamps so throttling is testable.
var createWaveformPeakProcessor = ({
  totalPeaks,
  samplesPerPeak,
  onProgress,
  progressIntervalInMs,
  now
}) => {
  const peaks = new Float32Array(totalPeaks);
  // State of the window currently being filled.
  let completedPeaks = 0;
  let windowMax = 0;
  let samplesInWindow = 0;
  // Throttling state for progress events.
  let lastEmitTimestamp = 0;
  let lastEmittedPeak = 0;
  const emitProgress = (isFinal) => {
    const timestamp = now();
    if (!isFinal) {
      // Skip when nothing changed since the last emission.
      if (completedPeaks === lastEmittedPeak && samplesInWindow === 0) {
        return;
      }
      // Rate-limit non-final emissions.
      if (timestamp - lastEmitTimestamp < progressIntervalInMs) {
        return;
      }
    }
    lastEmitTimestamp = timestamp;
    lastEmittedPeak = completedPeaks;
    emitWaveformProgress({
      peaks,
      completedPeaks,
      totalPeaks,
      final: isFinal,
      onProgress
    });
  };
  // Seal the current window into `peaks` and start a fresh one. Windows past
  // `totalPeaks` are counted but dropped (duration estimate may be short).
  const commitWindow = () => {
    if (completedPeaks < totalPeaks) {
      peaks[completedPeaks] = windowMax;
    }
    completedPeaks++;
    windowMax = 0;
    samplesInWindow = 0;
  };
  return {
    peaks,
    // `floats` holds frames interleaved across `channels`; a frame's peak is
    // the max absolute value among its channels.
    processSampleChunk: (floats, channels) => {
      const frameCount = Math.floor(floats.length / Math.max(1, channels));
      for (let frame = 0; frame < frameCount; frame++) {
        const base = frame * channels;
        let framePeak = 0;
        for (let channel = 0; channel < channels; channel++) {
          const abs = Math.abs(floats[base + channel] ?? 0);
          if (abs > framePeak) {
            framePeak = abs;
          }
        }
        if (framePeak > windowMax) {
          windowMax = framePeak;
        }
        samplesInWindow++;
        if (samplesInWindow >= samplesPerPeak) {
          commitWindow();
        }
      }
      emitProgress(false);
    },
    finalize: () => {
      // Flush a partially-filled trailing window, then force a final event.
      if (samplesInWindow > 0 && completedPeaks < totalPeaks) {
        peaks[completedPeaks] = windowMax;
        completedPeaks++;
      }
      emitProgress(true);
    }
  };
};
160
+
161
// src/components/load-waveform-peaks.ts
// Peaks are computed at a fixed resolution of 100 windows per second of audio.
var TARGET_SAMPLE_RATE = 100;
var DEFAULT_PROGRESS_INTERVAL_IN_MS = 50;
// Fully-computed peaks per source URL, kept for the worker's lifetime and
// replayed instantly on repeat requests.
var peaksCache = new Map;
// Streams the primary audio track of `url` through mediabunny and reduces it
// to per-10ms peak values. Progress is reported via `options.onProgress`,
// throttled to `options.progressIntervalInMs` (default 50ms).
// Returns an empty Float32Array when there is no audio track or when `signal`
// aborts mid-decode; aborted/trackless results are NOT cached.
// Throws for live streams and UNIX-epoch-timestamped streams.
async function loadWaveformPeaks(url, signal, options) {
  const cached = peaksCache.get(url);
  if (cached) {
    // Replay the cached result as a single, final progress event.
    emitWaveformProgress({
      peaks: cached,
      completedPeaks: cached.length,
      totalPeaks: cached.length,
      final: true,
      onProgress: options?.onProgress
    });
    return cached;
  }
  const input = new Input({
    formats: ALL_FORMATS,
    source: new UrlSource(url)
  });
  try {
    const audioTrack = await input.getPrimaryAudioTrack();
    if (!audioTrack) {
      return new Float32Array(0);
    }
    if (await audioTrack.isLive()) {
      throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
    }
    if (await audioTrack.isRelativeToUnixEpoch()) {
      throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
    }
    const sampleRate = await audioTrack.getSampleRate();
    // Prefer the cheap metadata duration; fall back to computing it by scan.
    const durationInSeconds = await audioTrack.getDurationFromMetadata({ skipLiveWait: true }) ?? await audioTrack.computeDuration({ skipLiveWait: true });
    const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
    const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
    const sink = new AudioSampleSink(audioTrack);
    const processor = createWaveformPeakProcessor({
      totalPeaks,
      samplesPerPeak,
      onProgress: options?.onProgress,
      progressIntervalInMs: options?.progressIntervalInMs ?? DEFAULT_PROGRESS_INTERVAL_IN_MS,
      now: () => Date.now()
    });
    for await (const sample of sink.samples()) {
      if (signal.aborted) {
        sample.close();
        return new Float32Array(0);
      }
      // Copy the sample's f32 data out before releasing it. Format "f32" is
      // presumably interleaved (WebCodecs convention), matching the
      // channel-interleaved layout the processor expects — verify against
      // mediabunny docs.
      const bytesNeeded = sample.allocationSize({
        format: "f32",
        planeIndex: 0
      });
      const floats = new Float32Array(bytesNeeded / 4);
      sample.copyTo(floats, { format: "f32", planeIndex: 0 });
      const channels = Math.max(1, sample.numberOfChannels);
      sample.close();
      processor.processSampleChunk(floats, channels);
    }
    processor.finalize();
    const { peaks } = processor;
    peaksCache.set(url, peaks);
    return peaks;
  } finally {
    // Release the demuxer/decoder resources on every exit path.
    input.dispose();
  }
}
227
+
228
// src/components/looped-media-timeline.ts
// True when the loop indicator should be tiled, i.e. more than one repetition
// is being displayed.
var shouldTileLoopDisplay = (loopDisplay) => {
  if (loopDisplay === undefined) {
    return false;
  }
  return loopDisplay.numberOfTimes > 1;
};
// Width in pixels of a single loop iteration inside the visualization strip;
// the full width when the media is not tiled.
var getLoopDisplayWidth = ({ visualizationWidth, loopDisplay }) => {
  return shouldTileLoopDisplay(loopDisplay)
    ? visualizationWidth / loopDisplay.numberOfTimes
    : visualizationWidth;
};
241
+
242
// src/components/slice-waveform-peaks.ts
// Returns the window of `peaks` covering [startFrom, startFrom + duration)
// of the media, where `playbackRate` stretches how much media time the
// composition duration consumes. The result is a subarray view, not a copy.
var sliceWaveformPeaks = ({
  durationInFrames,
  fps,
  peaks,
  playbackRate,
  startFrom
}) => {
  if (!peaks.length) {
    return peaks;
  }
  const secondsFromStart = startFrom / fps;
  const consumedSeconds = (durationInFrames / fps) * playbackRate;
  const from = Math.max(0, Math.floor(secondsFromStart * TARGET_SAMPLE_RATE));
  const to = Math.min(
    peaks.length,
    Math.ceil((secondsFromStart + consumedSeconds) * TARGET_SAMPLE_RATE)
  );
  return peaks.subarray(from, to);
};
259
+
260
+ // src/audio-waveform-worker.ts
261
+ var canvas = null;
262
+ var currentController = null;
263
+ var latestRequestId = 0;
264
+ var postError = (requestId, error) => {
265
+ const message = error instanceof Error ? error.message : "Failed to render waveform";
266
+ const payload = {
267
+ type: "error",
268
+ requestId,
269
+ message
270
+ };
271
+ self.postMessage(payload);
272
+ };
273
// Draws the waveform for the given (possibly still-growing) peaks onto the
// worker canvas. Non-looped media: draw the sliced peaks directly across the
// full width. Looped media: render one iteration into a temporary
// OffscreenCanvas and tile it horizontally via a repeat-x pattern, scaled so
// each tile spans exactly one loop's (fractional) width.
var drawPartialWaveform = (message, peaks) => {
  if (!canvas) {
    return;
  }
  const portionPeaks = sliceWaveformPeaks({
    // When tiling, slice only a single loop iteration's duration.
    durationInFrames: shouldTileLoopDisplay(message.loopDisplay) ? message.loopDisplay.durationInFrames : message.durationInFrames,
    fps: message.fps,
    peaks,
    playbackRate: message.playbackRate,
    startFrom: message.startFrom
  });
  if (!shouldTileLoopDisplay(message.loopDisplay)) {
    drawBars(canvas, portionPeaks, "rgba(255, 255, 255, 0.6)", message.volume, message.width);
    return;
  }
  const loopWidth = getLoopDisplayWidth({
    visualizationWidth: message.width,
    loopDisplay: message.loopDisplay
  });
  // Canvas dimensions must be positive integers; round the tile width up.
  const targetCanvas = new OffscreenCanvas(Math.max(1, Math.ceil(loopWidth)), message.height);
  drawBars(targetCanvas, portionPeaks, "rgba(255, 255, 255, 0.6)", message.volume, targetCanvas.width);
  const ctx = canvas.getContext("2d");
  if (!ctx) {
    throw new Error("Failed to get canvas context");
  }
  const pattern = ctx.createPattern(targetCanvas, "repeat-x");
  // createPattern can return null; silently skip drawing in that case.
  if (!pattern) {
    return;
  }
  // Scale the integer-sized tile back down to the exact fractional loop width
  // so the repetitions line up with the timeline.
  pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / targetCanvas.width, 1));
  ctx.clearRect(0, 0, message.width, message.height);
  ctx.fillStyle = pattern;
  ctx.fillRect(0, 0, message.width, message.height);
};
307
// Handles a render request: (re)sizes the shared canvas, streams waveform
// peaks for `message.src`, and draws progressively as peaks arrive. Only the
// most recent request may draw — any in-flight request is aborted and its
// late callbacks become no-ops (checked via both the AbortSignal and
// `latestRequestId`).
var renderWaveform = async (message) => {
  if (!canvas) {
    postError(message.requestId, new Error("Waveform canvas not initialized"));
    return;
  }
  // Cancel the previous request before adopting this one as the latest.
  const controller = new AbortController;
  currentController?.abort();
  currentController = controller;
  latestRequestId = message.requestId;
  try {
    // Assigning width/height also clears the canvas.
    canvas.width = message.width;
    canvas.height = message.height;
    const peaks = await loadWaveformPeaks(message.src, controller.signal, {
      onProgress: ({ peaks: nextPeaks }) => {
        // Drop stale progress from superseded requests.
        if (controller.signal.aborted || latestRequestId !== message.requestId) {
          return;
        }
        drawPartialWaveform(message, nextPeaks);
      }
    });
    if (controller.signal.aborted || latestRequestId !== message.requestId) {
      return;
    }
    // Final full draw once loading completes.
    drawPartialWaveform(message, peaks);
  } catch (error) {
    // Errors from aborted/superseded requests are intentionally swallowed;
    // only the active request reports back to the main thread.
    if (controller.signal.aborted || latestRequestId !== message.requestId) {
      return;
    }
    postError(message.requestId, error);
  }
};
338
// Routes messages from the main thread: "init" adopts the transferred
// OffscreenCanvas, "dispose" tears down all worker state, and anything else
// is treated as a render request (fire-and-forget).
self.addEventListener("message", (event) => {
  const message = event.data;
  switch (message.type) {
    case "init":
      canvas = message.canvas;
      break;
    case "dispose":
      currentController?.abort();
      currentController = null;
      canvas = null;
      break;
    default:
      renderWaveform(message);
  }
});