@remotion/studio 4.0.452 → 4.0.454

Files changed (34)
  1. package/dist/audio-waveform-worker.d.ts +1 -0
  2. package/dist/audio-waveform-worker.js +102 -0
  3. package/dist/components/AudioWaveform.d.ts +2 -0
  4. package/dist/components/AudioWaveform.js +168 -18
  5. package/dist/components/CurrentAsset.js +13 -5
  6. package/dist/components/Timeline/LoopedIndicator.js +5 -19
  7. package/dist/components/Timeline/TimelineSequence.js +18 -10
  8. package/dist/components/Timeline/TimelineVideoInfo.d.ts +2 -0
  9. package/dist/components/Timeline/TimelineVideoInfo.js +51 -12
  10. package/dist/components/audio-waveform-worker-types.d.ts +28 -0
  11. package/dist/components/audio-waveform-worker-types.js +2 -0
  12. package/dist/components/draw-peaks.d.ts +1 -1
  13. package/dist/components/load-waveform-peaks.d.ts +11 -1
  14. package/dist/components/load-waveform-peaks.js +33 -36
  15. package/dist/components/looped-media-timeline.d.ts +6 -0
  16. package/dist/components/looped-media-timeline.js +14 -0
  17. package/dist/components/slice-waveform-peaks.d.ts +7 -0
  18. package/dist/components/slice-waveform-peaks.js +15 -0
  19. package/dist/components/waveform-peak-processor.d.ts +23 -0
  20. package/dist/components/waveform-peak-processor.js +77 -0
  21. package/dist/esm/audio-waveform-worker.mjs +351 -0
  22. package/dist/esm/{chunk-hxr6txpe.js → chunk-g39hwn0a.js} +434 -108
  23. package/dist/esm/internals.mjs +434 -108
  24. package/dist/esm/previewEntry.mjs +434 -108
  25. package/dist/esm/renderEntry.mjs +1 -1
  26. package/dist/helpers/calculate-timeline.js +16 -0
  27. package/dist/helpers/extract-frames.js +12 -3
  28. package/dist/helpers/get-duration-or-compute.d.ts +2 -0
  29. package/dist/helpers/get-duration-or-compute.js +10 -0
  30. package/dist/helpers/get-timeline-nestedness.js +2 -1
  31. package/dist/helpers/use-max-media-duration.js +2 -2
  32. package/dist/make-audio-waveform-worker.d.ts +1 -0
  33. package/dist/make-audio-waveform-worker.js +10 -0
  34. package/package.json +19 -10
@@ -4385,6 +4385,13 @@ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
 import { useContext as useContext12, useEffect as useEffect14, useMemo as useMemo24, useState as useState17 } from "react";
 import { Internals as Internals9, staticFile } from "remotion";

+// src/helpers/get-duration-or-compute.ts
+var getDurationOrCompute = async (input) => {
+  return await input.getDurationFromMetadata(undefined, {
+    skipLiveWait: true
+  }) ?? input.computeDuration(undefined, { skipLiveWait: true });
+};
+
 // src/components/use-static-files.ts
 import React26, { createContext as createContext10, useContext as useContext11, useEffect as useEffect13, useState as useState16 } from "react";
 import { useRemotionEnvironment } from "remotion";
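The new helper prefers the duration stored in container metadata and only falls back to the slower decode-based computeDuration() when metadata is absent; skipLiveWait: true makes both calls return immediately for live sources instead of waiting for the stream to end. A minimal usage sketch in TypeScript, using the mediabunny Input setup that appears later in this diff (the URL is a placeholder):

    import {ALL_FORMATS, Input, UrlSource} from "mediabunny";

    const input = new Input({
      formats: ALL_FORMATS,
      source: new UrlSource("https://example.com/video.mp4"), // placeholder
    });

    // Cheap path first (container metadata), decode-based fallback second.
    const durationInSeconds = await getDurationOrCompute(input);
    console.log(`duration: ${durationInSeconds}s`);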
@@ -4528,15 +4535,21 @@ var CurrentAsset = () => {
     source: new UrlSource(url)
   });
   Promise.all([
-    input.computeDuration(),
+    getDurationOrCompute(input),
     input.getFormat(),
     input.getPrimaryVideoTrack()
-  ]).then(([duration, format, videoTrack]) => {
+  ]).then(async ([duration, format, videoTrack]) => {
+    if (videoTrack && await videoTrack.isLive()) {
+      throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+    }
+    if (videoTrack && await videoTrack.isRelativeToUnixEpoch()) {
+      throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+    }
     setMediaMetadata({
       duration,
       format: format.name,
-      width: videoTrack?.displayWidth ?? null,
-      height: videoTrack?.displayHeight ?? null
+      width: videoTrack ? await videoTrack.getDisplayWidth() : null,
+      height: videoTrack ? await videoTrack.getDisplayHeight() : null
     });
   }).catch(() => {});
   return () => {
@@ -20458,7 +20471,8 @@ var getTimelineNestedLevel = (sequence, allSequences, depth) => {
   if (!parentSequence) {
     throw new Error("has parentId but no parent");
   }
-  return getTimelineNestedLevel(parentSequence, allSequences, depth + 1);
+  const parentContributes = parentSequence.showInTimeline;
+  return getTimelineNestedLevel(parentSequence, allSequences, parentContributes ? depth + 1 : depth);
 };

 // src/helpers/get-timeline-sequence-hash.ts
@@ -20507,6 +20521,19 @@ var getTimelineSequenceSequenceSortKey = (track, tracks, sameHashes = {}, nonceR
 };

 // src/helpers/calculate-timeline.ts
+var getInheritedLoopDisplay = (sequence, sequences) => {
+  if (sequence.loopDisplay) {
+    return sequence.loopDisplay;
+  }
+  if (!sequence.parent) {
+    return;
+  }
+  const parent = sequences.find((s) => s.id === sequence.parent);
+  if (!parent) {
+    return;
+  }
+  return getInheritedLoopDisplay(parent, sequences);
+};
 var calculateTimeline = ({
   sequences
 }) => {
@@ -20535,7 +20562,8 @@ var calculateTimeline = ({
       sequence: {
         ...sequence,
         from: visibleStart,
-        duration: visibleDuration
+        duration: visibleDuration,
+        loopDisplay: sequence.type === "audio" || sequence.type === "video" ? getInheritedLoopDisplay(sequence, sortedSequences) : sequence.loopDisplay
       },
       depth: getTimelineNestedLevel(sequence, sortedSequences, 0),
       hash: actualHash,
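Timeline audio and video sequences now inherit loopDisplay from the nearest ancestor that defines one, so media nested inside a loop is tiled in the timeline. A small sketch of the lookup, with hypothetical sequence records that carry only the fields the helper reads (id, parent, loopDisplay):

    // Hypothetical data for illustration.
    const sequences = [
      {id: "loop", parent: null, loopDisplay: {durationInFrames: 60, numberOfTimes: 3, startOffset: 0}},
      {id: "audio", parent: "loop", loopDisplay: undefined},
    ];

    // The audio sequence defines no loopDisplay of its own, so the helper
    // climbs to its parent "loop" and returns that sequence's value.
    const inherited = getInheritedLoopDisplay(sequences[1], sequences);
    // => {durationInFrames: 60, numberOfTimes: 3, startOffset: 0}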
@@ -23144,7 +23172,7 @@ var useMaxMediaDuration = (s, fps) => {
     formats: ALL_FORMATS2,
     source: new UrlSource2(src)
   });
-  input2.computeDuration().then((duration) => {
+  getDurationOrCompute(input2).then((duration) => {
     cache.set(src, Math.floor(duration * fps));
     setMaxMediaDuration(Math.floor(duration * fps));
   }).catch((e) => {
@@ -23171,6 +23199,13 @@ var useMaxMediaDuration = (s, fps) => {
 import { useEffect as useEffect72, useMemo as useMemo119, useRef as useRef43, useState as useState77 } from "react";
 import { Internals as Internals55 } from "remotion";

+// src/make-audio-waveform-worker.ts
+var makeAudioWaveformWorker = () => {
+  return new Worker(new URL("./audio-waveform-worker.mjs", import.meta.url), {
+    type: "module"
+  });
+};
+
 // src/components/parse-color.ts
 var colorCache = new Map;
 var parseColor = (color) => {
@@ -23246,12 +23281,107 @@ var drawBars = (canvas, peaks, color, volume, width) => {

 // src/components/load-waveform-peaks.ts
 import { ALL_FORMATS as ALL_FORMATS3, AudioSampleSink, Input as Input3, UrlSource as UrlSource3 } from "mediabunny";
+
+// src/components/waveform-peak-processor.ts
+var emitWaveformProgress = ({
+  completedPeaks,
+  final,
+  onProgress,
+  peaks,
+  totalPeaks
+}) => {
+  onProgress?.({
+    peaks,
+    completedPeaks,
+    totalPeaks,
+    final
+  });
+};
+var createWaveformPeakProcessor = ({
+  totalPeaks,
+  samplesPerPeak,
+  onProgress,
+  progressIntervalInMs,
+  now
+}) => {
+  const peaks = new Float32Array(totalPeaks);
+  let peakIndex = 0;
+  let peakMax = 0;
+  let sampleInPeak = 0;
+  let lastProgressAt = 0;
+  let lastProgressPeak = 0;
+  const emitProgress = (force) => {
+    const timestamp = now();
+    if (!force && peakIndex === lastProgressPeak && sampleInPeak === 0) {
+      return;
+    }
+    if (!force && timestamp - lastProgressAt < progressIntervalInMs) {
+      return;
+    }
+    lastProgressAt = timestamp;
+    lastProgressPeak = peakIndex;
+    emitWaveformProgress({
+      peaks,
+      completedPeaks: peakIndex,
+      totalPeaks,
+      final: force,
+      onProgress
+    });
+  };
+  return {
+    peaks,
+    processSampleChunk: (floats, channels) => {
+      const frameCount = Math.floor(floats.length / Math.max(1, channels));
+      for (let frame2 = 0; frame2 < frameCount; frame2++) {
+        let framePeak = 0;
+        for (let channel = 0; channel < channels; channel++) {
+          const sampleIndex = frame2 * channels + channel;
+          const abs = Math.abs(floats[sampleIndex] ?? 0);
+          if (abs > framePeak) {
+            framePeak = abs;
+          }
+        }
+        if (framePeak > peakMax) {
+          peakMax = framePeak;
+        }
+        sampleInPeak++;
+        if (sampleInPeak >= samplesPerPeak) {
+          if (peakIndex < totalPeaks) {
+            peaks[peakIndex] = peakMax;
+          }
+          peakIndex++;
+          peakMax = 0;
+          sampleInPeak = 0;
+        }
+      }
+      emitProgress(false);
+    },
+    finalize: () => {
+      if (sampleInPeak > 0 && peakIndex < totalPeaks) {
+        peaks[peakIndex] = peakMax;
+        peakIndex++;
+      }
+      emitProgress(true);
+    }
+  };
+};
+
+// src/components/load-waveform-peaks.ts
 var TARGET_SAMPLE_RATE = 100;
+var DEFAULT_PROGRESS_INTERVAL_IN_MS = 50;
 var peaksCache = new Map;
-async function loadWaveformPeaks(url, signal) {
+async function loadWaveformPeaks(url, signal, options) {
   const cached = peaksCache.get(url);
-  if (cached)
+  if (cached) {
+    emitWaveformProgress({
+      peaks: cached,
+      completedPeaks: cached.length,
+      totalPeaks: cached.length,
+      final: true,
+      onProgress: options?.onProgress
+    });
     return cached;
+  }
   const input2 = new Input3({
     formats: ALL_FORMATS3,
     source: new UrlSource3(url)
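The peak computation extracted into createWaveformPeakProcessor is a pure, incremental max-abs downsampler, which is what lets it run unchanged on the main thread or inside the new worker. A self-contained sketch feeding it one synthetic stereo chunk (all numbers are illustrative):

    const processor = createWaveformPeakProcessor({
      totalPeaks: 100,      // 1 s of audio at TARGET_SAMPLE_RATE = 100
      samplesPerPeak: 480,  // 48000 Hz / 100 peaks per second
      onProgress: ({completedPeaks, totalPeaks, final}) =>
        console.log(`${completedPeaks}/${totalPeaks}${final ? " (final)" : ""}`),
      progressIntervalInMs: 50,
      now: () => Date.now(),
    });

    // Interleaved stereo sine tone: frame i occupies indices 2i (L) and 2i + 1 (R).
    const chunk = new Float32Array(48000 * 2);
    for (let i = 0; i < 48000; i++) {
      const s = Math.sin((2 * Math.PI * 440 * i) / 48000);
      chunk[i * 2] = s;
      chunk[i * 2 + 1] = s;
    }
    processor.processSampleChunk(chunk, 2);
    processor.finalize();
    // processor.peaks now holds 100 per-bucket absolute maxima in [0, 1].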
@@ -23261,15 +23391,24 @@ async function loadWaveformPeaks(url, signal) {
   if (!audioTrack) {
     return new Float32Array(0);
   }
-  const { sampleRate } = audioTrack;
-  const durationInSeconds = await audioTrack.computeDuration();
+  if (await audioTrack.isLive()) {
+    throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+  }
+  if (await audioTrack.isRelativeToUnixEpoch()) {
+    throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+  }
+  const sampleRate = await audioTrack.getSampleRate();
+  const durationInSeconds = await audioTrack.getDurationFromMetadata({ skipLiveWait: true }) ?? await audioTrack.computeDuration({ skipLiveWait: true });
   const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
   const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
-  const peaks = new Float32Array(totalPeaks);
-  let peakIndex = 0;
-  let peakMax = 0;
-  let sampleInPeak = 0;
   const sink = new AudioSampleSink(audioTrack);
+  const processor = createWaveformPeakProcessor({
+    totalPeaks,
+    samplesPerPeak,
+    onProgress: options?.onProgress,
+    progressIntervalInMs: options?.progressIntervalInMs ?? DEFAULT_PROGRESS_INTERVAL_IN_MS,
+    now: () => Date.now()
+  });
   for await (const sample of sink.samples()) {
     if (signal.aborted) {
       sample.close();
@@ -23282,34 +23421,11 @@ async function loadWaveformPeaks(url, signal) {
     const floats = new Float32Array(bytesNeeded / 4);
     sample.copyTo(floats, { format: "f32", planeIndex: 0 });
     const channels = Math.max(1, sample.numberOfChannels);
-    const frames = sample.numberOfFrames;
     sample.close();
-    for (let frame2 = 0; frame2 < frames; frame2++) {
-      let framePeak = 0;
-      for (let channel = 0; channel < channels; channel++) {
-        const sampleIndex = frame2 * channels + channel;
-        const abs = Math.abs(floats[sampleIndex] ?? 0);
-        if (abs > framePeak) {
-          framePeak = abs;
-        }
-      }
-      if (framePeak > peakMax) {
-        peakMax = framePeak;
-      }
-      sampleInPeak++;
-      if (sampleInPeak >= samplesPerPeak) {
-        if (peakIndex < totalPeaks) {
-          peaks[peakIndex] = peakMax;
-        }
-        peakIndex++;
-        peakMax = 0;
-        sampleInPeak = 0;
-      }
-    }
-  }
-  if (sampleInPeak > 0 && peakIndex < totalPeaks) {
-    peaks[peakIndex] = peakMax;
+    processor.processSampleChunk(floats, channels);
   }
+  processor.finalize();
+  const { peaks } = processor;
   peaksCache.set(url, peaks);
   return peaks;
 } finally {
@@ -23317,8 +23433,50 @@ async function loadWaveformPeaks(url, signal) {
   }
 }

+// src/components/looped-media-timeline.ts
+var shouldTileLoopDisplay = (loopDisplay) => {
+  return loopDisplay !== undefined && loopDisplay.numberOfTimes > 1;
+};
+var getLoopDisplayWidth = ({
+  visualizationWidth,
+  loopDisplay
+}) => {
+  if (!shouldTileLoopDisplay(loopDisplay)) {
+    return visualizationWidth;
+  }
+  return visualizationWidth / loopDisplay.numberOfTimes;
+};
+
+// src/components/slice-waveform-peaks.ts
+var sliceWaveformPeaks = ({
+  durationInFrames,
+  fps,
+  peaks,
+  playbackRate,
+  startFrom
+}) => {
+  if (peaks.length === 0) {
+    return peaks;
+  }
+  const startTimeInSeconds = startFrom / fps;
+  const durationInSeconds = durationInFrames / fps * playbackRate;
+  const startPeakIndex = Math.floor(startTimeInSeconds * TARGET_SAMPLE_RATE);
+  const endPeakIndex = Math.ceil((startTimeInSeconds + durationInSeconds) * TARGET_SAMPLE_RATE);
+  return peaks.subarray(Math.max(0, startPeakIndex), Math.min(peaks.length, endPeakIndex));
+};
+
 // src/components/AudioWaveform.tsx
 import { jsx as jsx209, jsxs as jsxs101 } from "react/jsx-runtime";
+var EMPTY_PEAKS = new Float32Array(0);
+var canRetryCanvasTransfer = (err) => {
+  return err instanceof DOMException && err.name === "InvalidStateError";
+};
+var canUseAudioWaveformWorker = () => {
+  if (typeof Worker === "undefined" || typeof OffscreenCanvas === "undefined" || typeof HTMLCanvasElement === "undefined") {
+    return false;
+  }
+  return "transferControlToOffscreen" in HTMLCanvasElement.prototype;
+};
 var container42 = {
   display: "flex",
   flexDirection: "row",
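sliceWaveformPeaks replaces the previous inline slice() with a zero-copy subarray view over the cached peaks. A worked example at 30 fps (values chosen for round numbers):

    const peaks = new Float32Array(1000); // 10 s at TARGET_SAMPLE_RATE = 100

    const visible = sliceWaveformPeaks({
      peaks,
      fps: 30,
      startFrom: 30,        // 1 s trimmed  -> startPeakIndex = floor(1 * 100) = 100
      durationInFrames: 60, // 2 s on screen
      playbackRate: 2,      // covers 4 s of media -> endPeakIndex = ceil(5 * 100) = 500
    });
    // visible.length === 400, and it aliases `peaks` (no copy is made).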
@@ -23337,11 +23495,41 @@ var errorMessage = {
   opacity: 0.75
 };
 var waveformCanvasStyle = {
-  pointerEvents: "none"
+  pointerEvents: "none",
+  width: "100%",
+  height: "100%"
 };
 var volumeCanvasStyle = {
   position: "absolute"
 };
+var drawLoopedWaveform = ({
+  canvas,
+  peaks,
+  volume,
+  visualizationWidth,
+  loopWidth
+}) => {
+  const h = canvas.height;
+  const w = Math.ceil(visualizationWidth);
+  const targetCanvas = document.createElement("canvas");
+  targetCanvas.width = Math.max(1, Math.ceil(loopWidth));
+  targetCanvas.height = h;
+  drawBars(targetCanvas, peaks, "rgba(255, 255, 255, 0.6)", volume, targetCanvas.width);
+  canvas.width = w;
+  canvas.height = h;
+  const ctx = canvas.getContext("2d");
+  if (!ctx) {
+    throw new Error("Failed to get canvas context");
+  }
+  const pattern = ctx.createPattern(targetCanvas, "repeat-x");
+  if (!pattern) {
+    return;
+  }
+  pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / targetCanvas.width, 1));
+  ctx.clearRect(0, 0, w, h);
+  ctx.fillStyle = pattern;
+  ctx.fillRect(0, 0, w, h);
+};
 var AudioWaveform = ({
   src,
   startFrom,
@@ -23349,10 +23537,13 @@ var AudioWaveform = ({
   visualizationWidth,
   volume,
   doesVolumeChange,
-  playbackRate
+  playbackRate,
+  loopDisplay
 }) => {
   const [peaks, setPeaks] = useState77(null);
   const [error, setError] = useState77(null);
+  const [waveformCanvasKey, setWaveformCanvasKey] = useState77(0);
+  const canUseWorkerPath = useMemo119(() => canUseAudioWaveformWorker(), []);
   const vidConf = Internals55.useUnsafeVideoConfig();
   if (vidConf === null) {
     throw new Error("Expected video config");
@@ -23360,8 +23551,15 @@ var AudioWaveform = ({
   const containerRef = useRef43(null);
   const waveformCanvas = useRef43(null);
   const volumeCanvas = useRef43(null);
+  const waveformWorker = useRef43(null);
+  const hasTransferredCanvas = useRef43(false);
+  const latestRequestId = useRef43(0);
   useEffect72(() => {
+    if (canUseWorkerPath) {
+      return;
+    }
    const controller = new AbortController;
+    setPeaks(null);
    setError(null);
    loadWaveformPeaks(src, controller.signal).then((p) => {
      if (!controller.signal.aborted) {
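On the main-thread fallback path, each src change still cancels the in-flight decode through an AbortController, now also clearing stale peaks first. The same pattern outside React, condensed (render is a hypothetical consumer):

    const controller = new AbortController();

    loadWaveformPeaks("https://example.com/audio.mp3", controller.signal)
      .then((peaks) => {
        // Ignore results that arrive after cancellation.
        if (!controller.signal.aborted) {
          render(peaks); // hypothetical consumer
        }
      })
      .catch((err) => {
        if (!controller.signal.aborted) {
          console.error(err);
        }
      });

    // When the source changes, abort the stale load:
    controller.abort();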
@@ -23373,30 +23571,127 @@ var AudioWaveform = ({
      }
    });
    return () => controller.abort();
-  }, [src]);
+  }, [canUseWorkerPath, src]);
+  useEffect72(() => {
+    if (!canUseWorkerPath) {
+      return;
+    }
+    const canvasElement = waveformCanvas.current;
+    if (!canvasElement || hasTransferredCanvas.current) {
+      return;
+    }
+    const worker = makeAudioWaveformWorker();
+    waveformWorker.current = worker;
+    worker.addEventListener("message", (event) => {
+      if (event.data.type === "error") {
+        if (event.data.requestId !== latestRequestId.current) {
+          return;
+        }
+        setError(new Error(event.data.message));
+      }
+    });
+    let offscreen;
+    try {
+      offscreen = canvasElement.transferControlToOffscreen();
+    } catch (err) {
+      worker.terminate();
+      waveformWorker.current = null;
+      if (canRetryCanvasTransfer(err)) {
+        setWaveformCanvasKey((key4) => key4 + 1);
+        return;
+      }
+      throw err;
+    }
+    hasTransferredCanvas.current = true;
+    worker.postMessage({ type: "init", canvas: offscreen }, [offscreen]);
+    return () => {
+      worker.postMessage({ type: "dispose" });
+      worker.terminate();
+      waveformWorker.current = null;
+      hasTransferredCanvas.current = false;
+    };
+  }, [canUseWorkerPath, waveformCanvasKey]);
   const portionPeaks = useMemo119(() => {
-    if (!peaks || peaks.length === 0) {
+    if (canUseWorkerPath || !peaks) {
      return null;
    }
-    const startTimeInSeconds = startFrom / vidConf.fps;
-    const durationInSeconds = durationInFrames / vidConf.fps * playbackRate;
-    const startPeakIndex = Math.floor(startTimeInSeconds * TARGET_SAMPLE_RATE);
-    const endPeakIndex = Math.ceil((startTimeInSeconds + durationInSeconds) * TARGET_SAMPLE_RATE);
-    return peaks.slice(Math.max(0, startPeakIndex), Math.min(peaks.length, endPeakIndex));
-  }, [peaks, startFrom, durationInFrames, vidConf.fps, playbackRate]);
+    return sliceWaveformPeaks({
+      durationInFrames: shouldTileLoopDisplay(loopDisplay) ? loopDisplay.durationInFrames : durationInFrames,
+      fps: vidConf.fps,
+      peaks,
+      playbackRate,
+      startFrom
+    });
+  }, [
+    canUseWorkerPath,
+    durationInFrames,
+    loopDisplay,
+    peaks,
+    playbackRate,
+    startFrom,
+    vidConf.fps
+  ]);
   useEffect72(() => {
    const { current: canvasElement } = waveformCanvas;
    const { current: containerElement } = containerRef;
-    if (!canvasElement || !containerElement || !portionPeaks || portionPeaks.length === 0) {
+    if (!canvasElement || !containerElement) {
      return;
    }
    const h = containerElement.clientHeight;
    const w = Math.ceil(visualizationWidth);
+    const vol = typeof volume === "number" ? volume : 1;
+    if (canUseWorkerPath) {
+      const worker = waveformWorker.current;
+      if (!worker || !hasTransferredCanvas.current) {
+        return;
+      }
+      latestRequestId.current += 1;
+      setError(null);
+      const message = {
+        type: "render",
+        requestId: latestRequestId.current,
+        src,
+        width: w,
+        height: h,
+        volume: vol,
+        startFrom,
+        durationInFrames,
+        fps: vidConf.fps,
+        playbackRate,
+        loopDisplay
+      };
+      worker.postMessage(message);
+      return;
+    }
    canvasElement.width = w;
    canvasElement.height = h;
-    const vol = typeof volume === "number" ? volume : 1;
-    drawBars(canvasElement, portionPeaks, "rgba(255, 255, 255, 0.6)", vol, w);
-  }, [portionPeaks, visualizationWidth, volume]);
+    if (shouldTileLoopDisplay(loopDisplay)) {
+      drawLoopedWaveform({
+        canvas: canvasElement,
+        peaks: portionPeaks ?? EMPTY_PEAKS,
+        volume: vol,
+        visualizationWidth,
+        loopWidth: getLoopDisplayWidth({
+          visualizationWidth,
+          loopDisplay
+        })
+      });
+    } else {
+      drawBars(canvasElement, portionPeaks ?? EMPTY_PEAKS, "rgba(255, 255, 255, 0.6)", vol, w);
+    }
+  }, [
+    canUseWorkerPath,
+    durationInFrames,
+    loopDisplay,
+    playbackRate,
+    portionPeaks,
+    src,
+    startFrom,
+    vidConf.fps,
+    visualizationWidth,
+    volume,
+    waveformCanvasKey
+  ]);
   useEffect72(() => {
    const { current: volumeCanvasElement } = volumeCanvas;
    const { current: containerElement } = containerRef;
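The worker path hands the canvas off exactly once with transferControlToOffscreen() and then drives it purely through messages: init carries the OffscreenCanvas in the transfer list, render describes what to draw, and dispose/terminate clean up. The receiving side is not shown in this hunk, so the sketch below is only an assumed shape of the worker, reusing the message names visible above:

    // Assumed worker-side counterpart (illustrative, not the shipped code).
    let canvas: OffscreenCanvas | null = null;

    self.addEventListener("message", (event: MessageEvent) => {
      const data = event.data;
      if (data.type === "init") {
        canvas = data.canvas; // ownership was transferred from the main thread
      } else if (data.type === "render") {
        // Decode peaks for data.src and draw into `canvas`; report failures as:
        // self.postMessage({ type: "error", requestId: data.requestId, message: String(err) });
      } else if (data.type === "dispose") {
        canvas = null;
      }
    });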
@@ -23430,6 +23725,7 @@ var AudioWaveform = ({
     context.stroke();
   }, [visualizationWidth, volume, doesVolumeChange]);
   if (error) {
+    console.error(error);
     return /* @__PURE__ */ jsx209("div", {
       style: container42,
       children: /* @__PURE__ */ jsx209("div", {
@@ -23438,7 +23734,7 @@ var AudioWaveform = ({
       })
     });
   }
-  if (!peaks) {
+  if (!canUseWorkerPath && !peaks) {
     return null;
   }
   return /* @__PURE__ */ jsxs101("div", {
@@ -23448,7 +23744,7 @@ var AudioWaveform = ({
     /* @__PURE__ */ jsx209("canvas", {
       ref: waveformCanvas,
       style: waveformCanvasStyle
-    }),
+    }, waveformCanvasKey),
     /* @__PURE__ */ jsx209("canvas", {
       ref: volumeCanvas,
       style: volumeCanvasStyle
@@ -23471,7 +23767,8 @@ var width = {
   position: "relative"
 };
 var icon4 = {
-  height: 12
+  height: 12,
+  filter: "drop-shadow(0 0 2px rgba(0, 0, 0, 0.9)) drop-shadow(0 1px 2px rgba(0, 0, 0, 0.8))"
 };
 var Icon = () => /* @__PURE__ */ jsx210("svg", {
   viewBox: "0 0 512 512",
@@ -23481,44 +23778,23 @@ var Icon = () => /* @__PURE__ */ jsx210("svg", {
   d: "M512 256c0 88.224-71.775 160-160 160H170.067l34.512 32.419c9.875 9.276 10.119 24.883.539 34.464l-10.775 10.775c-9.373 9.372-24.568 9.372-33.941 0l-92.686-92.686c-9.373-9.373-9.373-24.568 0-33.941l92.686-92.686c9.373-9.373 24.568-9.373 33.941 0l10.775 10.775c9.581 9.581 9.337 25.187-.539 34.464L170.067 352H352c52.935 0 96-43.065 96-96 0-13.958-2.996-27.228-8.376-39.204-4.061-9.039-2.284-19.626 4.723-26.633l12.183-12.183c11.499-11.499 30.965-8.526 38.312 5.982C505.814 205.624 512 230.103 512 256zM72.376 295.204C66.996 283.228 64 269.958 64 256c0-52.935 43.065-96 96-96h181.933l-34.512 32.419c-9.875 9.276-10.119 24.883-.539 34.464l10.775 10.775c9.373 9.372 24.568 9.372 33.941 0l92.686-92.686c9.373-9.373 9.373-24.568 0-33.941l-92.686-92.686c-9.373-9.373-24.568-9.373-33.941 0L306.882 29.12c-9.581 9.581-9.337 25.187.539 34.464L341.933 96H160C71.775 96 0 167.776 0 256c0 25.897 6.186 50.376 17.157 72.039 7.347 14.508 26.813 17.481 38.312 5.982l12.183-12.183c7.008-7.008 8.786-17.595 4.724-26.634z"
   })
 });
-var topLine = {
-  top: 0,
-  height: 2,
-  width: 1,
-  background: LIGHT_COLOR
-};
-var bottomLine = {
-  top: 0,
-  height: 2,
+var verticalLine = {
+  height: "100%",
   width: 1,
-  background: LIGHT_COLOR
-};
-var topContainer = {
-  justifyContent: "flex-start",
-  alignItems: "center"
+  background: "rgb(255,255,255, 0.5)"
 };
 var centerContainer = {
   justifyContent: "center",
   alignItems: "center"
 };
-var bottomContainer = {
-  justifyContent: "flex-end",
-  alignItems: "center"
-};
 var LoopedIndicator = () => {
   return /* @__PURE__ */ jsxs102("div", {
     style: width,
     children: [
       /* @__PURE__ */ jsx210(AbsoluteFill3, {
-        style: topContainer,
-        children: /* @__PURE__ */ jsx210("div", {
-          style: topLine
-        })
-      }),
-      /* @__PURE__ */ jsx210(AbsoluteFill3, {
-        style: bottomContainer,
+        style: centerContainer,
         children: /* @__PURE__ */ jsx210("div", {
-          style: bottomLine
+          style: verticalLine
         })
       }),
       /* @__PURE__ */ jsx210(AbsoluteFill3, {
@@ -23661,17 +23937,23 @@ async function extractFrames({
   }
   try {
     const [durationInSeconds, format, videoTrack] = await Promise.all([
-      input2.computeDuration(),
+      getDurationOrCompute(input2),
       input2.getFormat(),
       input2.getPrimaryVideoTrack()
     ]);
     if (!videoTrack) {
       throw new Error("No video track found in the input");
     }
+    if (await videoTrack.isLive()) {
+      throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + src);
+    }
+    if (await videoTrack.isRelativeToUnixEpoch()) {
+      throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + src);
+    }
     const timestamps = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
       track: {
-        width: videoTrack.displayWidth,
-        height: videoTrack.displayHeight
+        width: await videoTrack.getDisplayWidth(),
+        height: await videoTrack.getDisplayHeight()
       },
       container: format.name,
       durationInSeconds
@@ -23990,7 +24272,8 @@ var TimelineVideoInfo = ({
   volume,
   doesVolumeChange,
   premountWidth,
-  postmountWidth
+  postmountWidth,
+  loopDisplay
 }) => {
   const { fps } = useVideoConfig5();
   const ref2 = useRef45(null);
@@ -24013,25 +24296,54 @@ var TimelineVideoInfo = ({
     return;
   }
   current.appendChild(canvas);
+  const loopWidth = getLoopDisplayWidth({
+    visualizationWidth: naturalWidth,
+    loopDisplay
+  });
+  const shouldRepeatVideo = shouldTileLoopDisplay(loopDisplay);
+  const targetCanvas = shouldRepeatVideo ? document.createElement("canvas") : canvas;
+  targetCanvas.width = shouldRepeatVideo ? Math.max(1, Math.ceil(loopWidth)) : canvas.width;
+  targetCanvas.height = canvas.height;
+  const targetCtx = shouldRepeatVideo ? targetCanvas.getContext("2d") : ctx;
+  if (!targetCtx) {
+    current.removeChild(canvas);
+    return;
+  }
+  const repeatTarget = () => {
+    if (!shouldRepeatVideo) {
+      return;
+    }
+    const pattern = ctx.createPattern(targetCanvas, "repeat-x");
+    if (!pattern) {
+      return;
+    }
+    pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / targetCanvas.width, 1));
+    ctx.clearRect(0, 0, canvas.width, canvas.height);
+    ctx.fillStyle = pattern;
+    ctx.fillRect(0, 0, canvas.width, canvas.height);
+  };
   const filledSlots = new Map;
   const fromSeconds = trimBefore / fps;
-  const toSeconds = fromSeconds + durationInFrames * playbackRate / fps;
+  const visibleDurationInFrames = shouldRepeatVideo && loopDisplay ? loopDisplay.durationInFrames : durationInFrames;
+  const toSeconds = fromSeconds + visibleDurationInFrames * playbackRate / fps;
+  const targetWidth = shouldRepeatVideo ? targetCanvas.width : naturalWidth;
   if (aspectRatio.current !== null) {
     ensureSlots({
       filledSlots,
-      naturalWidth,
+      naturalWidth: targetWidth,
       fromSeconds,
       toSeconds,
       aspectRatio: aspectRatio.current
     });
     fillWithCachedFrames({
-      ctx,
-      naturalWidth,
+      ctx: targetCtx,
+      naturalWidth: targetWidth,
       filledSlots,
       src,
       segmentDuration: toSeconds - fromSeconds,
       fromSeconds
     });
+    repeatTarget();
     const unfilled = Array.from(filledSlots.keys()).filter((timestamp) => !filledSlots.get(timestamp));
     if (unfilled.length === 0) {
       return () => {
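Both the waveform and the video filmstrip tile a single loop iteration across the row by painting it into an offscreen tile and filling the visible canvas with a scaled repeat-x pattern, the same steps drawLoopedWaveform and repeatTarget perform above. The tiling step in isolation (tile size and loopWidth are illustrative):

    // Draw one loop iteration into a tile, then repeat it across the row.
    const tile = document.createElement("canvas");
    tile.width = 120; // rounded-up width of a single loop
    tile.height = 40;
    // ...render the single-iteration content into `tile` here...

    const row = document.createElement("canvas");
    row.width = 600;
    row.height = 40;
    const ctx = row.getContext("2d")!;
    const pattern = ctx.createPattern(tile, "repeat-x")!;
    // Scale so each repetition spans exactly loopWidth pixels, even though
    // the tile itself was rounded up to an integer width.
    const loopWidth = 119.5;
    pattern.setTransform(new DOMMatrix().scaleSelf(loopWidth / tile.width, 1));
    ctx.clearRect(0, 0, row.width, row.height);
    ctx.fillStyle = pattern;
    ctx.fillRect(0, 0, row.width, row.height);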
@@ -24049,7 +24361,7 @@ var TimelineVideoInfo = ({
     filledSlots,
     fromSeconds,
     toSeconds,
-    naturalWidth,
+    naturalWidth: targetWidth,
     aspectRatio: aspectRatio.current
   });
   return Array.from(filledSlots.keys()).map((timestamp) => timestamp / WEBCODECS_TIMESCALE);
@@ -24077,17 +24389,18 @@ var TimelineVideoInfo = ({
     filledSlots,
     fromSeconds,
     toSeconds,
-    naturalWidth,
+    naturalWidth: targetWidth,
     aspectRatio: aspectRatio.current
   });
   fillFrameWhereItFits({
-    ctx,
+    ctx: targetCtx,
     filledSlots,
-    visualizationWidth: naturalWidth,
+    visualizationWidth: targetWidth,
     frame: transformed,
     segmentDuration: toSeconds - fromSeconds,
     fromSeconds
   });
+  repeatTarget();
 } catch (e) {
   if (frame2) {
     frame2.close();
@@ -24103,13 +24416,14 @@ var TimelineVideoInfo = ({
     return;
   }
   fillWithCachedFrames({
-    ctx,
-    naturalWidth,
+    ctx: targetCtx,
+    naturalWidth: targetWidth,
     filledSlots,
     src,
     segmentDuration: toSeconds - fromSeconds,
     fromSeconds
   });
+  repeatTarget();
 }).catch((e) => {
   setError(e);
 });
@@ -24121,6 +24435,7 @@ var TimelineVideoInfo = ({
   durationInFrames,
   error,
   fps,
+  loopDisplay,
   naturalWidth,
   playbackRate,
   src,
@@ -24152,7 +24467,8 @@ var TimelineVideoInfo = ({
       durationInFrames,
       volume,
       doesVolumeChange,
-      playbackRate
+      playbackRate,
+      loopDisplay
     })
   })
 ]
@@ -24177,29 +24493,37 @@ var TimelineSequence = ({ s }) => {
 var Inner4 = ({ s, windowWidth }) => {
   const video = Internals56.useVideo();
   const maxMediaDuration = useMaxMediaDuration(s, video?.fps ?? 30);
+  const effectiveMaxMediaDuration = s.loopDisplay ? null : maxMediaDuration;
   if (!video) {
     throw new TypeError("Expected video config");
   }
   const frame2 = useCurrentFrame2();
   const relativeFrame = frame2 - s.from;
+  const displayDurationInFrames = s.loopDisplay ? s.loopDisplay.durationInFrames * s.loopDisplay.numberOfTimes : s.duration;
   const relativeFrameWithPremount = relativeFrame + (s.premountDisplay ?? 0);
-  const relativeFrameWithPostmount = relativeFrame - s.duration;
+  const relativeFrameWithPostmount = relativeFrame - displayDurationInFrames;
   const roundedFrame = Math.round(relativeFrame * 100) / 100;
-  const isInRange = relativeFrame >= 0 && relativeFrame < s.duration;
-  const isPremounting = relativeFrameWithPremount >= 0 && relativeFrameWithPremount < s.duration && !isInRange;
+  const isInRange = relativeFrame >= 0 && relativeFrame < displayDurationInFrames;
+  const isPremounting = relativeFrameWithPremount >= 0 && relativeFrameWithPremount < displayDurationInFrames && !isInRange;
   const isPostmounting = relativeFrameWithPostmount >= 0 && relativeFrameWithPostmount < (s.postmountDisplay ?? 0) && !isInRange;
   const { marginLeft, width: width2, naturalWidth, premountWidth, postmountWidth } = useMemo121(() => {
     return getTimelineSequenceLayout({
-      durationInFrames: s.loopDisplay ? s.loopDisplay.durationInFrames * s.loopDisplay.numberOfTimes : s.duration,
+      durationInFrames: displayDurationInFrames,
       startFrom: s.loopDisplay ? s.from + s.loopDisplay.startOffset : s.from,
       startFromMedia: s.type === "sequence" || s.type === "image" ? 0 : s.startMediaFrom,
-      maxMediaDuration,
+      maxMediaDuration: effectiveMaxMediaDuration,
       video,
       windowWidth,
       premountDisplay: s.premountDisplay,
       postmountDisplay: s.postmountDisplay
     });
-  }, [maxMediaDuration, s, video, windowWidth]);
+  }, [
+    displayDurationInFrames,
+    effectiveMaxMediaDuration,
+    s,
+    video,
+    windowWidth
+  ]);
   const style11 = useMemo121(() => {
     return {
       background: s.type === "audio" ? AUDIO_GRADIENT : s.type === "video" ? VIDEO_GRADIENT : s.type === "image" ? IMAGE_GRADIENT : BLUE,
@@ -24214,7 +24538,7 @@ var Inner4 = ({ s, windowWidth }) => {
       opacity: isInRange ? 1 : 0.5
     };
   }, [isInRange, marginLeft, s.type, width2]);
-  if (maxMediaDuration === null) {
+  if (maxMediaDuration === null && !s.loopDisplay) {
     return null;
   }
   return /* @__PURE__ */ jsxs105("div", {
@@ -24257,7 +24581,8 @@ var Inner4 = ({ s, windowWidth }) => {
   startFrom: s.startMediaFrom,
   durationInFrames: s.duration,
   volume: s.volume,
-  playbackRate: s.playbackRate
+  playbackRate: s.playbackRate,
+  loopDisplay: s.loopDisplay
 }) : null,
 s.type === "video" ? /* @__PURE__ */ jsx215(TimelineVideoInfo, {
   src: s.src,
@@ -24269,7 +24594,8 @@ var Inner4 = ({ s, windowWidth }) => {
   volume: s.volume,
   doesVolumeChange: s.doesVolumeChange,
   premountWidth: premountWidth ?? 0,
-  postmountWidth: postmountWidth ?? 0
+  postmountWidth: postmountWidth ?? 0,
+  loopDisplay: s.loopDisplay
 }) : null,
 s.type === "image" ? /* @__PURE__ */ jsx215(TimelineImageInfo, {
   src: s.src,