@remotion/studio 4.0.452 → 4.0.454

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/audio-waveform-worker.d.ts +1 -0
  2. package/dist/audio-waveform-worker.js +102 -0
  3. package/dist/components/AudioWaveform.d.ts +2 -0
  4. package/dist/components/AudioWaveform.js +168 -18
  5. package/dist/components/CurrentAsset.js +13 -5
  6. package/dist/components/Timeline/LoopedIndicator.js +5 -19
  7. package/dist/components/Timeline/TimelineSequence.js +18 -10
  8. package/dist/components/Timeline/TimelineVideoInfo.d.ts +2 -0
  9. package/dist/components/Timeline/TimelineVideoInfo.js +51 -12
  10. package/dist/components/audio-waveform-worker-types.d.ts +28 -0
  11. package/dist/components/audio-waveform-worker-types.js +2 -0
  12. package/dist/components/draw-peaks.d.ts +1 -1
  13. package/dist/components/load-waveform-peaks.d.ts +11 -1
  14. package/dist/components/load-waveform-peaks.js +33 -36
  15. package/dist/components/looped-media-timeline.d.ts +6 -0
  16. package/dist/components/looped-media-timeline.js +14 -0
  17. package/dist/components/slice-waveform-peaks.d.ts +7 -0
  18. package/dist/components/slice-waveform-peaks.js +15 -0
  19. package/dist/components/waveform-peak-processor.d.ts +23 -0
  20. package/dist/components/waveform-peak-processor.js +77 -0
  21. package/dist/esm/audio-waveform-worker.mjs +351 -0
  22. package/dist/esm/{chunk-hxr6txpe.js → chunk-g39hwn0a.js} +434 -108
  23. package/dist/esm/internals.mjs +434 -108
  24. package/dist/esm/previewEntry.mjs +434 -108
  25. package/dist/esm/renderEntry.mjs +1 -1
  26. package/dist/helpers/calculate-timeline.js +16 -0
  27. package/dist/helpers/extract-frames.js +12 -3
  28. package/dist/helpers/get-duration-or-compute.d.ts +2 -0
  29. package/dist/helpers/get-duration-or-compute.js +10 -0
  30. package/dist/helpers/get-timeline-nestedness.js +2 -1
  31. package/dist/helpers/use-max-media-duration.js +2 -2
  32. package/dist/make-audio-waveform-worker.d.ts +1 -0
  33. package/dist/make-audio-waveform-worker.js +10 -0
  34. package/package.json +19 -10
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,102 @@
1
+ "use strict";
2
+ /// <reference lib="webworker" />
3
+ Object.defineProperty(exports, "__esModule", { value: true });
4
+ const draw_peaks_1 = require("./components/draw-peaks");
5
+ const load_waveform_peaks_1 = require("./components/load-waveform-peaks");
6
+ const looped_media_timeline_1 = require("./components/looped-media-timeline");
7
+ const slice_waveform_peaks_1 = require("./components/slice-waveform-peaks");
8
+ let canvas = null;
9
+ let currentController = null;
10
+ let latestRequestId = 0;
11
+ const postError = (requestId, error) => {
12
+ const message = error instanceof Error ? error.message : 'Failed to render waveform';
13
+ const payload = {
14
+ type: 'error',
15
+ requestId,
16
+ message,
17
+ };
18
+ self.postMessage(payload);
19
+ };
20
+ const drawPartialWaveform = (message, peaks) => {
21
+ if (!canvas) {
22
+ return;
23
+ }
24
+ const portionPeaks = (0, slice_waveform_peaks_1.sliceWaveformPeaks)({
25
+ durationInFrames: (0, looped_media_timeline_1.shouldTileLoopDisplay)(message.loopDisplay)
26
+ ? message.loopDisplay.durationInFrames
27
+ : message.durationInFrames,
28
+ fps: message.fps,
29
+ peaks,
30
+ playbackRate: message.playbackRate,
31
+ startFrom: message.startFrom,
32
+ });
33
+ if (!(0, looped_media_timeline_1.shouldTileLoopDisplay)(message.loopDisplay)) {
34
+ (0, draw_peaks_1.drawBars)(canvas, portionPeaks, 'rgba(255, 255, 255, 0.6)', message.volume, message.width);
35
+ return;
36
+ }
37
+ const loopWidth = (0, looped_media_timeline_1.getLoopDisplayWidth)({
38
+ visualizationWidth: message.width,
39
+ loopDisplay: message.loopDisplay,
40
+ });
41
+ const targetCanvas = new OffscreenCanvas(Math.max(1, Math.ceil(loopWidth)), message.height);
42
+ (0, draw_peaks_1.drawBars)(targetCanvas, portionPeaks, 'rgba(255, 255, 255, 0.6)', message.volume, targetCanvas.width);
43
+ const ctx = canvas.getContext('2d');
44
+ if (!ctx) {
45
+ throw new Error('Failed to get canvas context');
46
+ }
47
+ const pattern = ctx.createPattern(targetCanvas, 'repeat-x');
48
+ if (!pattern) {
49
+ return;
50
+ }
51
+ pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / targetCanvas.width, 1));
52
+ ctx.clearRect(0, 0, message.width, message.height);
53
+ ctx.fillStyle = pattern;
54
+ ctx.fillRect(0, 0, message.width, message.height);
55
+ };
56
+ const renderWaveform = async (message) => {
57
+ if (!canvas) {
58
+ postError(message.requestId, new Error('Waveform canvas not initialized'));
59
+ return;
60
+ }
61
+ const controller = new AbortController();
62
+ currentController === null || currentController === void 0 ? void 0 : currentController.abort();
63
+ currentController = controller;
64
+ latestRequestId = message.requestId;
65
+ try {
66
+ canvas.width = message.width;
67
+ canvas.height = message.height;
68
+ const peaks = await (0, load_waveform_peaks_1.loadWaveformPeaks)(message.src, controller.signal, {
69
+ onProgress: ({ peaks: nextPeaks }) => {
70
+ if (controller.signal.aborted ||
71
+ latestRequestId !== message.requestId) {
72
+ return;
73
+ }
74
+ drawPartialWaveform(message, nextPeaks);
75
+ },
76
+ });
77
+ if (controller.signal.aborted || latestRequestId !== message.requestId) {
78
+ return;
79
+ }
80
+ drawPartialWaveform(message, peaks);
81
+ }
82
+ catch (error) {
83
+ if (controller.signal.aborted || latestRequestId !== message.requestId) {
84
+ return;
85
+ }
86
+ postError(message.requestId, error);
87
+ }
88
+ };
89
+ self.addEventListener('message', (event) => {
90
+ const message = event.data;
91
+ if (message.type === 'init') {
92
+ canvas = message.canvas;
93
+ return;
94
+ }
95
+ if (message.type === 'dispose') {
96
+ currentController === null || currentController === void 0 ? void 0 : currentController.abort();
97
+ currentController = null;
98
+ canvas = null;
99
+ return;
100
+ }
101
+ renderWaveform(message);
102
+ });
@@ -1,4 +1,5 @@
1
1
  import React from 'react';
2
+ import type { LoopDisplay } from 'remotion';
2
3
  export declare const AudioWaveform: React.FC<{
3
4
  readonly src: string;
4
5
  readonly visualizationWidth: number;
@@ -7,4 +8,5 @@ export declare const AudioWaveform: React.FC<{
7
8
  readonly volume: string | number;
8
9
  readonly doesVolumeChange: boolean;
9
10
  readonly playbackRate: number;
11
+ readonly loopDisplay: LoopDisplay | undefined;
10
12
  }>;
@@ -6,8 +6,25 @@ const react_1 = require("react");
6
6
  const remotion_1 = require("remotion");
7
7
  const colors_1 = require("../helpers/colors");
8
8
  const timeline_layout_1 = require("../helpers/timeline-layout");
9
+ const make_audio_waveform_worker_1 = require("../make-audio-waveform-worker");
9
10
  const draw_peaks_1 = require("./draw-peaks");
10
11
  const load_waveform_peaks_1 = require("./load-waveform-peaks");
12
+ const looped_media_timeline_1 = require("./looped-media-timeline");
13
+ const slice_waveform_peaks_1 = require("./slice-waveform-peaks");
14
+ const EMPTY_PEAKS = new Float32Array(0);
15
+ // Recreate the canvas after Fast Refresh because an already transferred canvas
16
+ // cannot be handed to OffscreenCanvas again.
17
+ const canRetryCanvasTransfer = (err) => {
18
+ return err instanceof DOMException && err.name === 'InvalidStateError';
19
+ };
20
+ const canUseAudioWaveformWorker = () => {
21
+ if (typeof Worker === 'undefined' ||
22
+ typeof OffscreenCanvas === 'undefined' ||
23
+ typeof HTMLCanvasElement === 'undefined') {
24
+ return false;
25
+ }
26
+ return 'transferControlToOffscreen' in HTMLCanvasElement.prototype;
27
+ };
11
28
  const container = {
12
29
  display: 'flex',
13
30
  flexDirection: 'row',
@@ -27,13 +44,39 @@ const errorMessage = {
27
44
  };
28
45
  const waveformCanvasStyle = {
29
46
  pointerEvents: 'none',
47
+ width: '100%',
48
+ height: '100%',
30
49
  };
31
50
  const volumeCanvasStyle = {
32
51
  position: 'absolute',
33
52
  };
34
- const AudioWaveform = ({ src, startFrom, durationInFrames, visualizationWidth, volume, doesVolumeChange, playbackRate, }) => {
53
+ const drawLoopedWaveform = ({ canvas, peaks, volume, visualizationWidth, loopWidth, }) => {
54
+ const h = canvas.height;
55
+ const w = Math.ceil(visualizationWidth);
56
+ const targetCanvas = document.createElement('canvas');
57
+ targetCanvas.width = Math.max(1, Math.ceil(loopWidth));
58
+ targetCanvas.height = h;
59
+ (0, draw_peaks_1.drawBars)(targetCanvas, peaks, 'rgba(255, 255, 255, 0.6)', volume, targetCanvas.width);
60
+ canvas.width = w;
61
+ canvas.height = h;
62
+ const ctx = canvas.getContext('2d');
63
+ if (!ctx) {
64
+ throw new Error('Failed to get canvas context');
65
+ }
66
+ const pattern = ctx.createPattern(targetCanvas, 'repeat-x');
67
+ if (!pattern) {
68
+ return;
69
+ }
70
+ pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / targetCanvas.width, 1));
71
+ ctx.clearRect(0, 0, w, h);
72
+ ctx.fillStyle = pattern;
73
+ ctx.fillRect(0, 0, w, h);
74
+ };
75
+ const AudioWaveform = ({ src, startFrom, durationInFrames, visualizationWidth, volume, doesVolumeChange, playbackRate, loopDisplay, }) => {
35
76
  const [peaks, setPeaks] = (0, react_1.useState)(null);
36
77
  const [error, setError] = (0, react_1.useState)(null);
78
+ const [waveformCanvasKey, setWaveformCanvasKey] = (0, react_1.useState)(0);
79
+ const canUseWorkerPath = (0, react_1.useMemo)(() => canUseAudioWaveformWorker(), []);
37
80
  const vidConf = remotion_1.Internals.useUnsafeVideoConfig();
38
81
  if (vidConf === null) {
39
82
  throw new Error('Expected video config');
@@ -41,8 +84,15 @@ const AudioWaveform = ({ src, startFrom, durationInFrames, visualizationWidth, v
41
84
  const containerRef = (0, react_1.useRef)(null);
42
85
  const waveformCanvas = (0, react_1.useRef)(null);
43
86
  const volumeCanvas = (0, react_1.useRef)(null);
87
+ const waveformWorker = (0, react_1.useRef)(null);
88
+ const hasTransferredCanvas = (0, react_1.useRef)(false);
89
+ const latestRequestId = (0, react_1.useRef)(0);
44
90
  (0, react_1.useEffect)(() => {
91
+ if (canUseWorkerPath) {
92
+ return;
93
+ }
45
94
  const controller = new AbortController();
95
+ setPeaks(null);
46
96
  setError(null);
47
97
  (0, load_waveform_peaks_1.loadWaveformPeaks)(src, controller.signal)
48
98
  .then((p) => {
@@ -56,33 +106,131 @@ const AudioWaveform = ({ src, startFrom, durationInFrames, visualizationWidth, v
56
106
  }
57
107
  });
58
108
  return () => controller.abort();
59
- }, [src]);
109
+ }, [canUseWorkerPath, src]);
110
+ (0, react_1.useEffect)(() => {
111
+ if (!canUseWorkerPath) {
112
+ return;
113
+ }
114
+ const canvasElement = waveformCanvas.current;
115
+ if (!canvasElement || hasTransferredCanvas.current) {
116
+ return;
117
+ }
118
+ const worker = (0, make_audio_waveform_worker_1.makeAudioWaveformWorker)();
119
+ waveformWorker.current = worker;
120
+ worker.addEventListener('message', (event) => {
121
+ if (event.data.type === 'error') {
122
+ if (event.data.requestId !== latestRequestId.current) {
123
+ return;
124
+ }
125
+ setError(new Error(event.data.message));
126
+ }
127
+ });
128
+ let offscreen;
129
+ try {
130
+ offscreen = canvasElement.transferControlToOffscreen();
131
+ }
132
+ catch (err) {
133
+ worker.terminate();
134
+ waveformWorker.current = null;
135
+ if (canRetryCanvasTransfer(err)) {
136
+ setWaveformCanvasKey((key) => key + 1);
137
+ return;
138
+ }
139
+ throw err;
140
+ }
141
+ hasTransferredCanvas.current = true;
142
+ worker.postMessage({ type: 'init', canvas: offscreen }, [offscreen]);
143
+ return () => {
144
+ worker.postMessage({ type: 'dispose' });
145
+ worker.terminate();
146
+ waveformWorker.current = null;
147
+ hasTransferredCanvas.current = false;
148
+ };
149
+ }, [canUseWorkerPath, waveformCanvasKey]);
60
150
  const portionPeaks = (0, react_1.useMemo)(() => {
61
- if (!peaks || peaks.length === 0) {
151
+ if (canUseWorkerPath || !peaks) {
62
152
  return null;
63
153
  }
64
- const startTimeInSeconds = startFrom / vidConf.fps;
65
- const durationInSeconds = (durationInFrames / vidConf.fps) * playbackRate;
66
- const startPeakIndex = Math.floor(startTimeInSeconds * load_waveform_peaks_1.TARGET_SAMPLE_RATE);
67
- const endPeakIndex = Math.ceil((startTimeInSeconds + durationInSeconds) * load_waveform_peaks_1.TARGET_SAMPLE_RATE);
68
- return peaks.slice(Math.max(0, startPeakIndex), Math.min(peaks.length, endPeakIndex));
69
- }, [peaks, startFrom, durationInFrames, vidConf.fps, playbackRate]);
154
+ return (0, slice_waveform_peaks_1.sliceWaveformPeaks)({
155
+ durationInFrames: (0, looped_media_timeline_1.shouldTileLoopDisplay)(loopDisplay)
156
+ ? loopDisplay.durationInFrames
157
+ : durationInFrames,
158
+ fps: vidConf.fps,
159
+ peaks,
160
+ playbackRate,
161
+ startFrom,
162
+ });
163
+ }, [
164
+ canUseWorkerPath,
165
+ durationInFrames,
166
+ loopDisplay,
167
+ peaks,
168
+ playbackRate,
169
+ startFrom,
170
+ vidConf.fps,
171
+ ]);
70
172
  (0, react_1.useEffect)(() => {
71
173
  const { current: canvasElement } = waveformCanvas;
72
174
  const { current: containerElement } = containerRef;
73
- if (!canvasElement ||
74
- !containerElement ||
75
- !portionPeaks ||
76
- portionPeaks.length === 0) {
175
+ if (!canvasElement || !containerElement) {
77
176
  return;
78
177
  }
79
178
  const h = containerElement.clientHeight;
80
179
  const w = Math.ceil(visualizationWidth);
180
+ const vol = typeof volume === 'number' ? volume : 1;
181
+ if (canUseWorkerPath) {
182
+ const worker = waveformWorker.current;
183
+ if (!worker || !hasTransferredCanvas.current) {
184
+ return;
185
+ }
186
+ latestRequestId.current += 1;
187
+ setError(null);
188
+ const message = {
189
+ type: 'render',
190
+ requestId: latestRequestId.current,
191
+ src,
192
+ width: w,
193
+ height: h,
194
+ volume: vol,
195
+ startFrom,
196
+ durationInFrames,
197
+ fps: vidConf.fps,
198
+ playbackRate,
199
+ loopDisplay,
200
+ };
201
+ worker.postMessage(message);
202
+ return;
203
+ }
81
204
  canvasElement.width = w;
82
205
  canvasElement.height = h;
83
- const vol = typeof volume === 'number' ? volume : 1;
84
- (0, draw_peaks_1.drawBars)(canvasElement, portionPeaks, 'rgba(255, 255, 255, 0.6)', vol, w);
85
- }, [portionPeaks, visualizationWidth, volume]);
206
+ if ((0, looped_media_timeline_1.shouldTileLoopDisplay)(loopDisplay)) {
207
+ drawLoopedWaveform({
208
+ canvas: canvasElement,
209
+ peaks: portionPeaks !== null && portionPeaks !== void 0 ? portionPeaks : EMPTY_PEAKS,
210
+ volume: vol,
211
+ visualizationWidth,
212
+ loopWidth: (0, looped_media_timeline_1.getLoopDisplayWidth)({
213
+ visualizationWidth,
214
+ loopDisplay,
215
+ }),
216
+ });
217
+ }
218
+ else {
219
+ (0, draw_peaks_1.drawBars)(canvasElement, portionPeaks !== null && portionPeaks !== void 0 ? portionPeaks : EMPTY_PEAKS, 'rgba(255, 255, 255, 0.6)', vol, w);
220
+ }
221
+ }, [
222
+ canUseWorkerPath,
223
+ durationInFrames,
224
+ loopDisplay,
225
+ playbackRate,
226
+ portionPeaks,
227
+ src,
228
+ startFrom,
229
+ vidConf.fps,
230
+ visualizationWidth,
231
+ volume,
232
+ waveformCanvasKey,
233
+ ]);
86
234
  (0, react_1.useEffect)(() => {
87
235
  const { current: volumeCanvasElement } = volumeCanvas;
88
236
  const { current: containerElement } = containerRef;
@@ -117,13 +265,15 @@ const AudioWaveform = ({ src, startFrom, durationInFrames, visualizationWidth, v
117
265
  context.stroke();
118
266
  }, [visualizationWidth, volume, doesVolumeChange]);
119
267
  if (error) {
268
+ // eslint-disable-next-line no-console
269
+ console.error(error);
120
270
  return (jsx_runtime_1.jsx("div", { style: container, children: jsx_runtime_1.jsx("div", { style: errorMessage, children: "No waveform available. Audio might not support CORS." }) }));
121
271
  }
122
- if (!peaks) {
272
+ if (!canUseWorkerPath && !peaks) {
123
273
  return null;
124
274
  }
125
275
  return (jsx_runtime_1.jsxs("div", { ref: containerRef, style: container, children: [
126
- jsx_runtime_1.jsx("canvas", { ref: waveformCanvas, style: waveformCanvasStyle }), jsx_runtime_1.jsx("canvas", { ref: volumeCanvas, style: volumeCanvasStyle })
276
+ jsx_runtime_1.jsx("canvas", { ref: waveformCanvas, style: waveformCanvasStyle }, waveformCanvasKey), jsx_runtime_1.jsx("canvas", { ref: volumeCanvas, style: volumeCanvasStyle })
127
277
  ] }));
128
278
  };
129
279
  exports.AudioWaveform = AudioWaveform;
@@ -7,6 +7,7 @@ const mediabunny_1 = require("mediabunny");
7
7
  const react_1 = require("react");
8
8
  const remotion_1 = require("remotion");
9
9
  const colors_1 = require("../helpers/colors");
10
+ const get_duration_or_compute_1 = require("../helpers/get-duration-or-compute");
10
11
  const use_static_files_1 = require("./use-static-files");
11
12
  exports.CURRENT_ASSET_HEIGHT = 80;
12
13
  const container = {
@@ -72,17 +73,24 @@ const CurrentAsset = () => {
72
73
  source: new mediabunny_1.UrlSource(url),
73
74
  });
74
75
  Promise.all([
75
- input.computeDuration(),
76
+ (0, get_duration_or_compute_1.getDurationOrCompute)(input),
76
77
  input.getFormat(),
77
78
  input.getPrimaryVideoTrack(),
78
79
  ])
79
- .then(([duration, format, videoTrack]) => {
80
- var _a, _b;
80
+ .then(async ([duration, format, videoTrack]) => {
81
+ if (videoTrack && (await videoTrack.isLive())) {
82
+ throw new Error('Live streams are not currently supported by Remotion. Sorry! Source: ' +
83
+ url);
84
+ }
85
+ if (videoTrack && (await videoTrack.isRelativeToUnixEpoch())) {
86
+ throw new Error('Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
87
+ url);
88
+ }
81
89
  setMediaMetadata({
82
90
  duration,
83
91
  format: format.name,
84
- width: (_a = videoTrack === null || videoTrack === void 0 ? void 0 : videoTrack.displayWidth) !== null && _a !== void 0 ? _a : null,
85
- height: (_b = videoTrack === null || videoTrack === void 0 ? void 0 : videoTrack.displayHeight) !== null && _b !== void 0 ? _b : null,
92
+ width: videoTrack ? await videoTrack.getDisplayWidth() : null,
93
+ height: videoTrack ? await videoTrack.getDisplayHeight() : null,
86
94
  });
87
95
  })
88
96
  .catch(() => {
@@ -12,35 +12,21 @@ const width = {
12
12
  };
13
13
  const icon = {
14
14
  height: 12,
15
+ filter: 'drop-shadow(0 0 2px rgba(0, 0, 0, 0.9)) drop-shadow(0 1px 2px rgba(0, 0, 0, 0.8))',
15
16
  };
16
17
  const Icon = () => (jsx_runtime_1.jsx("svg", { viewBox: "0 0 512 512", style: icon, children: jsx_runtime_1.jsx("path", { fill: colors_1.LIGHT_COLOR, d: "M512 256c0 88.224-71.775 160-160 160H170.067l34.512 32.419c9.875 9.276 10.119 24.883.539 34.464l-10.775 10.775c-9.373 9.372-24.568 9.372-33.941 0l-92.686-92.686c-9.373-9.373-9.373-24.568 0-33.941l92.686-92.686c9.373-9.373 24.568-9.373 33.941 0l10.775 10.775c9.581 9.581 9.337 25.187-.539 34.464L170.067 352H352c52.935 0 96-43.065 96-96 0-13.958-2.996-27.228-8.376-39.204-4.061-9.039-2.284-19.626 4.723-26.633l12.183-12.183c11.499-11.499 30.965-8.526 38.312 5.982C505.814 205.624 512 230.103 512 256zM72.376 295.204C66.996 283.228 64 269.958 64 256c0-52.935 43.065-96 96-96h181.933l-34.512 32.419c-9.875 9.276-10.119 24.883-.539 34.464l10.775 10.775c9.373 9.372 24.568 9.372 33.941 0l92.686-92.686c9.373-9.373 9.373-24.568 0-33.941l-92.686-92.686c-9.373-9.373-24.568-9.373-33.941 0L306.882 29.12c-9.581 9.581-9.337 25.187.539 34.464L341.933 96H160C71.775 96 0 167.776 0 256c0 25.897 6.186 50.376 17.157 72.039 7.347 14.508 26.813 17.481 38.312 5.982l12.183-12.183c7.008-7.008 8.786-17.595 4.724-26.634z" }) }));
17
- const topLine = {
18
- top: 0,
19
- height: 2,
18
+ const verticalLine = {
19
+ height: '100%',
20
20
  width: 1,
21
- background: colors_1.LIGHT_COLOR,
22
- };
23
- const bottomLine = {
24
- top: 0,
25
- height: 2,
26
- width: 1,
27
- background: colors_1.LIGHT_COLOR,
28
- };
29
- const topContainer = {
30
- justifyContent: 'flex-start',
31
- alignItems: 'center',
21
+ background: 'rgb(255,255,255, 0.5)',
32
22
  };
33
23
  const centerContainer = {
34
24
  justifyContent: 'center',
35
25
  alignItems: 'center',
36
26
  };
37
- const bottomContainer = {
38
- justifyContent: 'flex-end',
39
- alignItems: 'center',
40
- };
41
27
  const LoopedIndicator = () => {
42
28
  return (jsx_runtime_1.jsxs("div", { style: width, children: [
43
- jsx_runtime_1.jsx(remotion_1.AbsoluteFill, { style: topContainer, children: jsx_runtime_1.jsx("div", { style: topLine }) }), jsx_runtime_1.jsx(remotion_1.AbsoluteFill, { style: bottomContainer, children: jsx_runtime_1.jsx("div", { style: bottomLine }) }), jsx_runtime_1.jsx(remotion_1.AbsoluteFill, { style: centerContainer, children: jsx_runtime_1.jsx(Icon, {}) })
29
+ jsx_runtime_1.jsx(remotion_1.AbsoluteFill, { style: centerContainer, children: jsx_runtime_1.jsx("div", { style: verticalLine }) }), jsx_runtime_1.jsx(remotion_1.AbsoluteFill, { style: centerContainer, children: jsx_runtime_1.jsx(Icon, {}) })
44
30
  ] }));
45
31
  };
46
32
  exports.LoopedIndicator = LoopedIndicator;
@@ -32,35 +32,43 @@ const Inner = ({ s, windowWidth }) => {
32
32
  var _a, _b, _c;
33
33
  const video = remotion_1.Internals.useVideo();
34
34
  const maxMediaDuration = (0, use_max_media_duration_1.useMaxMediaDuration)(s, (_a = video === null || video === void 0 ? void 0 : video.fps) !== null && _a !== void 0 ? _a : 30);
35
+ const effectiveMaxMediaDuration = s.loopDisplay ? null : maxMediaDuration;
35
36
  if (!video) {
36
37
  throw new TypeError('Expected video config');
37
38
  }
38
39
  const frame = (0, remotion_1.useCurrentFrame)();
39
40
  const relativeFrame = frame - s.from;
41
+ const displayDurationInFrames = s.loopDisplay
42
+ ? s.loopDisplay.durationInFrames * s.loopDisplay.numberOfTimes
43
+ : s.duration;
40
44
  const relativeFrameWithPremount = relativeFrame + ((_b = s.premountDisplay) !== null && _b !== void 0 ? _b : 0);
41
- const relativeFrameWithPostmount = relativeFrame - s.duration;
45
+ const relativeFrameWithPostmount = relativeFrame - displayDurationInFrames;
42
46
  const roundedFrame = Math.round(relativeFrame * 100) / 100;
43
- const isInRange = relativeFrame >= 0 && relativeFrame < s.duration;
47
+ const isInRange = relativeFrame >= 0 && relativeFrame < displayDurationInFrames;
44
48
  const isPremounting = relativeFrameWithPremount >= 0 &&
45
- relativeFrameWithPremount < s.duration &&
49
+ relativeFrameWithPremount < displayDurationInFrames &&
46
50
  !isInRange;
47
51
  const isPostmounting = relativeFrameWithPostmount >= 0 &&
48
52
  relativeFrameWithPostmount < ((_c = s.postmountDisplay) !== null && _c !== void 0 ? _c : 0) &&
49
53
  !isInRange;
50
54
  const { marginLeft, width, naturalWidth, premountWidth, postmountWidth } = (0, react_1.useMemo)(() => {
51
55
  return (0, get_timeline_sequence_layout_1.getTimelineSequenceLayout)({
52
- durationInFrames: s.loopDisplay
53
- ? s.loopDisplay.durationInFrames * s.loopDisplay.numberOfTimes
54
- : s.duration,
56
+ durationInFrames: displayDurationInFrames,
55
57
  startFrom: s.loopDisplay ? s.from + s.loopDisplay.startOffset : s.from,
56
58
  startFromMedia: s.type === 'sequence' || s.type === 'image' ? 0 : s.startMediaFrom,
57
- maxMediaDuration,
59
+ maxMediaDuration: effectiveMaxMediaDuration,
58
60
  video,
59
61
  windowWidth,
60
62
  premountDisplay: s.premountDisplay,
61
63
  postmountDisplay: s.postmountDisplay,
62
64
  });
63
- }, [maxMediaDuration, s, video, windowWidth]);
65
+ }, [
66
+ displayDurationInFrames,
67
+ effectiveMaxMediaDuration,
68
+ s,
69
+ video,
70
+ windowWidth,
71
+ ]);
64
72
  const style = (0, react_1.useMemo)(() => {
65
73
  return {
66
74
  background: s.type === 'audio'
@@ -81,7 +89,7 @@ const Inner = ({ s, windowWidth }) => {
81
89
  opacity: isInRange ? 1 : 0.5,
82
90
  };
83
91
  }, [isInRange, marginLeft, s.type, width]);
84
- if (maxMediaDuration === null) {
92
+ if (maxMediaDuration === null && !s.loopDisplay) {
85
93
  return null;
86
94
  }
87
95
  return (jsx_runtime_1.jsxs("div", { style: style, title: s.displayName, children: [premountWidth ? (jsx_runtime_1.jsx("div", { style: {
@@ -107,7 +115,7 @@ const Inner = ({ s, windowWidth }) => {
107
115
  )`,
108
116
  position: 'absolute',
109
117
  right: 0,
110
- } })) : null, s.type === 'audio' ? (jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: s.src, doesVolumeChange: s.doesVolumeChange, visualizationWidth: width, startFrom: s.startMediaFrom, durationInFrames: s.duration, volume: s.volume, playbackRate: s.playbackRate })) : null, s.type === 'video' ? (jsx_runtime_1.jsx(TimelineVideoInfo_1.TimelineVideoInfo, { src: s.src, visualizationWidth: width, naturalWidth: naturalWidth, trimBefore: s.startMediaFrom, durationInFrames: s.duration, playbackRate: s.playbackRate, volume: s.volume, doesVolumeChange: s.doesVolumeChange, premountWidth: premountWidth !== null && premountWidth !== void 0 ? premountWidth : 0, postmountWidth: postmountWidth !== null && postmountWidth !== void 0 ? postmountWidth : 0 })) : null, s.type === 'image' ? (jsx_runtime_1.jsx(TimelineImageInfo_1.TimelineImageInfo, { src: s.src, visualizationWidth: width })) : null, s.loopDisplay === undefined ? null : (jsx_runtime_1.jsx(LoopedTimelineIndicators_1.LoopedTimelineIndicator, { loops: s.loopDisplay.numberOfTimes })), s.type !== 'audio' &&
118
+ } })) : null, s.type === 'audio' ? (jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: s.src, doesVolumeChange: s.doesVolumeChange, visualizationWidth: width, startFrom: s.startMediaFrom, durationInFrames: s.duration, volume: s.volume, playbackRate: s.playbackRate, loopDisplay: s.loopDisplay })) : null, s.type === 'video' ? (jsx_runtime_1.jsx(TimelineVideoInfo_1.TimelineVideoInfo, { src: s.src, visualizationWidth: width, naturalWidth: naturalWidth, trimBefore: s.startMediaFrom, durationInFrames: s.duration, playbackRate: s.playbackRate, volume: s.volume, doesVolumeChange: s.doesVolumeChange, premountWidth: premountWidth !== null && premountWidth !== void 0 ? premountWidth : 0, postmountWidth: postmountWidth !== null && postmountWidth !== void 0 ? postmountWidth : 0, loopDisplay: s.loopDisplay })) : null, s.type === 'image' ? (jsx_runtime_1.jsx(TimelineImageInfo_1.TimelineImageInfo, { src: s.src, visualizationWidth: width })) : null, s.loopDisplay === undefined ? null : (jsx_runtime_1.jsx(LoopedTimelineIndicators_1.LoopedTimelineIndicator, { loops: s.loopDisplay.numberOfTimes })), s.type !== 'audio' &&
111
119
  s.type !== 'video' &&
112
120
  s.type !== 'image' &&
113
121
  s.loopDisplay === undefined &&
@@ -1,4 +1,5 @@
1
1
  import React from 'react';
2
+ import type { LoopDisplay } from 'remotion';
2
3
  export declare const TimelineVideoInfo: React.FC<{
3
4
  readonly src: string;
4
5
  readonly visualizationWidth: number;
@@ -10,4 +11,5 @@ export declare const TimelineVideoInfo: React.FC<{
10
11
  readonly doesVolumeChange: boolean;
11
12
  readonly premountWidth: number;
12
13
  readonly postmountWidth: number;
14
+ readonly loopDisplay: LoopDisplay | undefined;
13
15
  }>;