@remotion/studio 4.0.453 → 4.0.454

This diff shows the published contents of two package versions as they appear in their respective public registries, and is provided for informational purposes only.
@@ -265,6 +265,8 @@ const AudioWaveform = ({ src, startFrom, durationInFrames, visualizationWidth, v
  context.stroke();
  }, [visualizationWidth, volume, doesVolumeChange]);
  if (error) {
+ // eslint-disable-next-line no-console
+ console.error(error);
  return (jsx_runtime_1.jsx("div", { style: container, children: jsx_runtime_1.jsx("div", { style: errorMessage, children: "No waveform available. Audio might not support CORS." }) }));
  }
  if (!canUseWorkerPath && !peaks) {
@@ -7,6 +7,7 @@ const mediabunny_1 = require("mediabunny");
  const react_1 = require("react");
  const remotion_1 = require("remotion");
  const colors_1 = require("../helpers/colors");
+ const get_duration_or_compute_1 = require("../helpers/get-duration-or-compute");
  const use_static_files_1 = require("./use-static-files");
  exports.CURRENT_ASSET_HEIGHT = 80;
  const container = {
@@ -72,17 +73,24 @@ const CurrentAsset = () => {
  source: new mediabunny_1.UrlSource(url),
  });
  Promise.all([
- input.computeDuration(),
+ (0, get_duration_or_compute_1.getDurationOrCompute)(input),
  input.getFormat(),
  input.getPrimaryVideoTrack(),
  ])
- .then(([duration, format, videoTrack]) => {
- var _a, _b;
+ .then(async ([duration, format, videoTrack]) => {
+ if (videoTrack && (await videoTrack.isLive())) {
+ throw new Error('Live streams are not currently supported by Remotion. Sorry! Source: ' +
+ url);
+ }
+ if (videoTrack && (await videoTrack.isRelativeToUnixEpoch())) {
+ throw new Error('Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
+ url);
+ }
  setMediaMetadata({
  duration,
  format: format.name,
- width: (_a = videoTrack === null || videoTrack === void 0 ? void 0 : videoTrack.displayWidth) !== null && _a !== void 0 ? _a : null,
- height: (_b = videoTrack === null || videoTrack === void 0 ? void 0 : videoTrack.displayHeight) !== null && _b !== void 0 ? _b : null,
+ width: videoTrack ? await videoTrack.getDisplayWidth() : null,
+ height: videoTrack ? await videoTrack.getDisplayHeight() : null,
  });
  })
  .catch(() => {
@@ -9,7 +9,7 @@ exports.TARGET_SAMPLE_RATE = TARGET_SAMPLE_RATE;
  const DEFAULT_PROGRESS_INTERVAL_IN_MS = 50;
  const peaksCache = new Map();
  async function loadWaveformPeaks(url, signal, options) {
- var _a;
+ var _a, _b;
  const cached = peaksCache.get(url);
  if (cached) {
  (0, waveform_peak_processor_1.emitWaveformProgress)({
@@ -30,8 +30,16 @@ async function loadWaveformPeaks(url, signal, options) {
  if (!audioTrack) {
  return new Float32Array(0);
  }
- const { sampleRate } = audioTrack;
- const durationInSeconds = await audioTrack.computeDuration();
+ if (await audioTrack.isLive()) {
+ throw new Error('Live streams are not currently supported by Remotion. Sorry! Source: ' +
+ url);
+ }
+ if (await audioTrack.isRelativeToUnixEpoch()) {
+ throw new Error('Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
+ url);
+ }
+ const sampleRate = await audioTrack.getSampleRate();
+ const durationInSeconds = (_a = (await audioTrack.getDurationFromMetadata({ skipLiveWait: true }))) !== null && _a !== void 0 ? _a : (await audioTrack.computeDuration({ skipLiveWait: true }));
  const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
  const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
  const sink = new mediabunny_1.AudioSampleSink(audioTrack);
@@ -39,7 +47,7 @@ async function loadWaveformPeaks(url, signal, options) {
  totalPeaks,
  samplesPerPeak,
  onProgress: options === null || options === void 0 ? void 0 : options.onProgress,
- progressIntervalInMs: (_a = options === null || options === void 0 ? void 0 : options.progressIntervalInMs) !== null && _a !== void 0 ? _a : DEFAULT_PROGRESS_INTERVAL_IN_MS,
+ progressIntervalInMs: (_b = options === null || options === void 0 ? void 0 : options.progressIntervalInMs) !== null && _b !== void 0 ? _b : DEFAULT_PROGRESS_INTERVAL_IN_MS,
  now: () => Date.now(),
  });
  for await (const sample of sink.samples()) {
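The hunks above show the core of this release's mediabunny migration: synchronous track properties (destructured `sampleRate`, a direct `computeDuration()` call) are replaced by async getters, and live or UNIX-epoch-timestamped streams are rejected up front. In uncompiled form, the repeated guard reads roughly like the sketch below; the `assertSupportedTrack` name and the structural `TrackLike` type are illustrative only and not part of the package, while the `isLive()`/`isRelativeToUnixEpoch()` calls mirror the compiled output.

```ts
// Illustrative sketch of the guard added across these hunks; not part of the
// package. TrackLike names only the methods this sketch uses — mediabunny's
// real track objects expose more.
type TrackLike = {
	isLive: () => Promise<boolean>;
	isRelativeToUnixEpoch: () => Promise<boolean>;
};

const assertSupportedTrack = async (track: TrackLike, source: string) => {
	if (await track.isLive()) {
		throw new Error(
			'Live streams are not currently supported by Remotion. Sorry! Source: ' +
				source,
		);
	}
	if (await track.isRelativeToUnixEpoch()) {
		throw new Error(
			'Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
				source,
		);
	}
};
```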
@@ -183,8 +183,14 @@ async function loadWaveformPeaks(url, signal, options) {
  if (!audioTrack) {
  return new Float32Array(0);
  }
- const { sampleRate } = audioTrack;
- const durationInSeconds = await audioTrack.computeDuration();
+ if (await audioTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (await audioTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ const sampleRate = await audioTrack.getSampleRate();
+ const durationInSeconds = await audioTrack.getDurationFromMetadata({ skipLiveWait: true }) ?? await audioTrack.computeDuration({ skipLiveWait: true });
  const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
  const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
  const sink = new AudioSampleSink(audioTrack);
@@ -4366,6 +4366,13 @@ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
  import { useContext as useContext12, useEffect as useEffect14, useMemo as useMemo24, useState as useState17 } from "react";
  import { Internals as Internals9, staticFile } from "remotion";

+ // src/helpers/get-duration-or-compute.ts
+ var getDurationOrCompute = async (input) => {
+ return await input.getDurationFromMetadata(undefined, {
+ skipLiveWait: true
+ }) ?? input.computeDuration(undefined, { skipLiveWait: true });
+ };
+
  // src/components/use-static-files.ts
  import React26, { createContext as createContext10, useContext as useContext11, useEffect as useEffect13, useState as useState16 } from "react";
  import { useRemotionEnvironment } from "remotion";
@@ -4509,15 +4516,21 @@ var CurrentAsset = () => {
  source: new UrlSource(url)
  });
  Promise.all([
- input.computeDuration(),
+ getDurationOrCompute(input),
  input.getFormat(),
  input.getPrimaryVideoTrack()
- ]).then(([duration, format, videoTrack]) => {
+ ]).then(async ([duration, format, videoTrack]) => {
+ if (videoTrack && await videoTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (videoTrack && await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
  setMediaMetadata({
  duration,
  format: format.name,
- width: videoTrack?.displayWidth ?? null,
- height: videoTrack?.displayHeight ?? null
+ width: videoTrack ? await videoTrack.getDisplayWidth() : null,
+ height: videoTrack ? await videoTrack.getDisplayHeight() : null
  });
  }).catch(() => {});
  return () => {
@@ -23140,7 +23153,7 @@ var useMaxMediaDuration = (s, fps) => {
  formats: ALL_FORMATS2,
  source: new UrlSource2(src)
  });
- input2.computeDuration().then((duration) => {
+ getDurationOrCompute(input2).then((duration) => {
  cache.set(src, Math.floor(duration * fps));
  setMaxMediaDuration(Math.floor(duration * fps));
  }).catch((e) => {
@@ -23359,8 +23372,14 @@ async function loadWaveformPeaks(url, signal, options) {
  if (!audioTrack) {
  return new Float32Array(0);
  }
- const { sampleRate } = audioTrack;
- const durationInSeconds = await audioTrack.computeDuration();
+ if (await audioTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (await audioTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ const sampleRate = await audioTrack.getSampleRate();
+ const durationInSeconds = await audioTrack.getDurationFromMetadata({ skipLiveWait: true }) ?? await audioTrack.computeDuration({ skipLiveWait: true });
  const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
  const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
  const sink = new AudioSampleSink(audioTrack);
@@ -23687,6 +23706,7 @@ var AudioWaveform = ({
  context.stroke();
  }, [visualizationWidth, volume, doesVolumeChange]);
  if (error) {
+ console.error(error);
  return /* @__PURE__ */ jsx209("div", {
  style: container42,
  children: /* @__PURE__ */ jsx209("div", {
@@ -23898,17 +23918,23 @@ async function extractFrames({
  }
  try {
  const [durationInSeconds, format, videoTrack] = await Promise.all([
- input2.computeDuration(),
+ getDurationOrCompute(input2),
  input2.getFormat(),
  input2.getPrimaryVideoTrack()
  ]);
  if (!videoTrack) {
  throw new Error("No video track found in the input");
  }
+ if (await videoTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + src);
+ }
+ if (await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + src);
+ }
  const timestamps = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
  track: {
- width: videoTrack.displayWidth,
- height: videoTrack.displayHeight
+ width: await videoTrack.getDisplayWidth(),
+ height: await videoTrack.getDisplayHeight()
  },
  container: format.name,
  durationInSeconds
@@ -4385,6 +4385,13 @@ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
  import { useContext as useContext12, useEffect as useEffect14, useMemo as useMemo24, useState as useState17 } from "react";
  import { Internals as Internals9, staticFile } from "remotion";

+ // src/helpers/get-duration-or-compute.ts
+ var getDurationOrCompute = async (input) => {
+ return await input.getDurationFromMetadata(undefined, {
+ skipLiveWait: true
+ }) ?? input.computeDuration(undefined, { skipLiveWait: true });
+ };
+
  // src/components/use-static-files.ts
  import React26, { createContext as createContext10, useContext as useContext11, useEffect as useEffect13, useState as useState16 } from "react";
  import { useRemotionEnvironment } from "remotion";
@@ -4528,15 +4535,21 @@ var CurrentAsset = () => {
  source: new UrlSource(url)
  });
  Promise.all([
- input.computeDuration(),
+ getDurationOrCompute(input),
  input.getFormat(),
  input.getPrimaryVideoTrack()
- ]).then(([duration, format, videoTrack]) => {
+ ]).then(async ([duration, format, videoTrack]) => {
+ if (videoTrack && await videoTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (videoTrack && await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
  setMediaMetadata({
  duration,
  format: format.name,
- width: videoTrack?.displayWidth ?? null,
- height: videoTrack?.displayHeight ?? null
+ width: videoTrack ? await videoTrack.getDisplayWidth() : null,
+ height: videoTrack ? await videoTrack.getDisplayHeight() : null
  });
  }).catch(() => {});
  return () => {
@@ -23159,7 +23172,7 @@ var useMaxMediaDuration = (s, fps) => {
  formats: ALL_FORMATS2,
  source: new UrlSource2(src)
  });
- input2.computeDuration().then((duration) => {
+ getDurationOrCompute(input2).then((duration) => {
  cache.set(src, Math.floor(duration * fps));
  setMaxMediaDuration(Math.floor(duration * fps));
  }).catch((e) => {
@@ -23378,8 +23391,14 @@ async function loadWaveformPeaks(url, signal, options) {
  if (!audioTrack) {
  return new Float32Array(0);
  }
- const { sampleRate } = audioTrack;
- const durationInSeconds = await audioTrack.computeDuration();
+ if (await audioTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (await audioTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ const sampleRate = await audioTrack.getSampleRate();
+ const durationInSeconds = await audioTrack.getDurationFromMetadata({ skipLiveWait: true }) ?? await audioTrack.computeDuration({ skipLiveWait: true });
  const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
  const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
  const sink = new AudioSampleSink(audioTrack);
@@ -23706,6 +23725,7 @@ var AudioWaveform = ({
  context.stroke();
  }, [visualizationWidth, volume, doesVolumeChange]);
  if (error) {
+ console.error(error);
  return /* @__PURE__ */ jsx209("div", {
  style: container42,
  children: /* @__PURE__ */ jsx209("div", {
@@ -23917,17 +23937,23 @@ async function extractFrames({
  }
  try {
  const [durationInSeconds, format, videoTrack] = await Promise.all([
- input2.computeDuration(),
+ getDurationOrCompute(input2),
  input2.getFormat(),
  input2.getPrimaryVideoTrack()
  ]);
  if (!videoTrack) {
  throw new Error("No video track found in the input");
  }
+ if (await videoTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + src);
+ }
+ if (await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + src);
+ }
  const timestamps = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
  track: {
- width: videoTrack.displayWidth,
- height: videoTrack.displayHeight
+ width: await videoTrack.getDisplayWidth(),
+ height: await videoTrack.getDisplayHeight()
  },
  container: format.name,
  durationInSeconds
@@ -6615,6 +6615,13 @@ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
  import { useContext as useContext12, useEffect as useEffect22, useMemo as useMemo29, useState as useState24 } from "react";
  import { Internals as Internals11, staticFile } from "remotion";

+ // src/helpers/get-duration-or-compute.ts
+ var getDurationOrCompute = async (input) => {
+ return await input.getDurationFromMetadata(undefined, {
+ skipLiveWait: true
+ }) ?? input.computeDuration(undefined, { skipLiveWait: true });
+ };
+
  // src/components/use-static-files.ts
  import React40, { createContext as createContext10, useContext as useContext11, useEffect as useEffect21, useState as useState23 } from "react";
  import { useRemotionEnvironment } from "remotion";
@@ -6758,15 +6765,21 @@ var CurrentAsset = () => {
  source: new UrlSource(url)
  });
  Promise.all([
- input.computeDuration(),
+ getDurationOrCompute(input),
  input.getFormat(),
  input.getPrimaryVideoTrack()
- ]).then(([duration, format, videoTrack]) => {
+ ]).then(async ([duration, format, videoTrack]) => {
+ if (videoTrack && await videoTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (videoTrack && await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
  setMediaMetadata({
  duration,
  format: format.name,
- width: videoTrack?.displayWidth ?? null,
- height: videoTrack?.displayHeight ?? null
+ width: videoTrack ? await videoTrack.getDisplayWidth() : null,
+ height: videoTrack ? await videoTrack.getDisplayHeight() : null
  });
  }).catch(() => {});
  return () => {
@@ -23751,7 +23764,7 @@ var useMaxMediaDuration = (s, fps) => {
  formats: ALL_FORMATS2,
  source: new UrlSource2(src)
  });
- input2.computeDuration().then((duration) => {
+ getDurationOrCompute(input2).then((duration) => {
  cache.set(src, Math.floor(duration * fps));
  setMaxMediaDuration(Math.floor(duration * fps));
  }).catch((e) => {
@@ -23970,8 +23983,14 @@ async function loadWaveformPeaks(url, signal, options) {
  if (!audioTrack) {
  return new Float32Array(0);
  }
- const { sampleRate } = audioTrack;
- const durationInSeconds = await audioTrack.computeDuration();
+ if (await audioTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ if (await audioTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + url);
+ }
+ const sampleRate = await audioTrack.getSampleRate();
+ const durationInSeconds = await audioTrack.getDurationFromMetadata({ skipLiveWait: true }) ?? await audioTrack.computeDuration({ skipLiveWait: true });
  const totalPeaks = Math.ceil(durationInSeconds * TARGET_SAMPLE_RATE);
  const samplesPerPeak = Math.max(1, Math.floor(sampleRate / TARGET_SAMPLE_RATE));
  const sink = new AudioSampleSink(audioTrack);
@@ -24298,6 +24317,7 @@ var AudioWaveform = ({
  context.stroke();
  }, [visualizationWidth, volume, doesVolumeChange]);
  if (error) {
+ console.error(error);
  return /* @__PURE__ */ jsx212("div", {
  style: container43,
  children: /* @__PURE__ */ jsx212("div", {
@@ -24509,17 +24529,23 @@ async function extractFrames({
  }
  try {
  const [durationInSeconds, format, videoTrack] = await Promise.all([
- input2.computeDuration(),
+ getDurationOrCompute(input2),
  input2.getFormat(),
  input2.getPrimaryVideoTrack()
  ]);
  if (!videoTrack) {
  throw new Error("No video track found in the input");
  }
+ if (await videoTrack.isLive()) {
+ throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + src);
+ }
+ if (await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + src);
+ }
  const timestamps = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
  track: {
- width: videoTrack.displayWidth,
- height: videoTrack.displayHeight
+ width: await videoTrack.getDisplayWidth(),
+ height: await videoTrack.getDisplayHeight()
  },
  container: format.name,
  durationInSeconds
@@ -209,7 +209,7 @@ var renderContent = (Root) => {
  renderToDOM(/* @__PURE__ */ jsx("div", {
  children: /* @__PURE__ */ jsx(DelayedSpinner, {})
  }));
- import("./chunk-hn4803e7.js").then(({ StudioInternals }) => {
+ import("./chunk-g39hwn0a.js").then(({ StudioInternals }) => {
  window.remotion_isStudio = true;
  window.remotion_isReadOnlyStudio = true;
  window.remotion_inputProps = "{}";
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.extractFrames = extractFrames;
  const mediabunny_1 = require("mediabunny");
+ const get_duration_or_compute_1 = require("./get-duration-or-compute");
  async function extractFrames({ src, timestampsInSeconds, onVideoSample, signal, }) {
  var _a;
  const input = new mediabunny_1.Input({
@@ -16,18 +17,26 @@ async function extractFrames({ src, timestampsInSeconds, onVideoSample, signal,
  }
  try {
  const [durationInSeconds, format, videoTrack] = await Promise.all([
- input.computeDuration(),
+ (0, get_duration_or_compute_1.getDurationOrCompute)(input),
  input.getFormat(),
  input.getPrimaryVideoTrack(),
  ]);
  if (!videoTrack) {
  throw new Error('No video track found in the input');
  }
+ if (await videoTrack.isLive()) {
+ throw new Error('Live streams are not currently supported by Remotion. Sorry! Source: ' +
+ src);
+ }
+ if (await videoTrack.isRelativeToUnixEpoch()) {
+ throw new Error('Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
+ src);
+ }
  const timestamps = typeof timestampsInSeconds === 'function'
  ? await timestampsInSeconds({
  track: {
- width: videoTrack.displayWidth,
- height: videoTrack.displayHeight,
+ width: await videoTrack.getDisplayWidth(),
+ height: await videoTrack.getDisplayHeight(),
  },
  container: format.name,
  durationInSeconds,
@@ -0,0 +1,2 @@
+ import type { Input } from 'mediabunny';
+ export declare const getDurationOrCompute: (input: Input<import("mediabunny").Source>) => Promise<number>;
@@ -0,0 +1,10 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getDurationOrCompute = void 0;
+ const getDurationOrCompute = async (input) => {
+ var _a;
+ return ((_a = (await input.getDurationFromMetadata(undefined, {
+ skipLiveWait: true,
+ }))) !== null && _a !== void 0 ? _a : input.computeDuration(undefined, { skipLiveWait: true }));
+ };
+ exports.getDurationOrCompute = getDurationOrCompute;
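The ESM bundles earlier in this diff embed the uncompiled source of this new helper (`// src/helpers/get-duration-or-compute.ts`). A TypeScript rendering, with the `Input<Source>` annotation taken from the generated .d.ts above, looks roughly like:

```ts
// Reconstructed from the source embedded in the ESM bundles; the Input/Source
// type annotations follow the generated .d.ts in this diff.
import type {Input, Source} from 'mediabunny';

export const getDurationOrCompute = async (
	input: Input<Source>,
): Promise<number> => {
	// Prefer the cheap metadata read; fall back to a full duration
	// computation when the container does not declare one.
	return (
		(await input.getDurationFromMetadata(undefined, {skipLiveWait: true})) ??
		input.computeDuration(undefined, {skipLiveWait: true})
	);
};
```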
@@ -4,6 +4,7 @@ exports.useMaxMediaDuration = void 0;
  const media_utils_1 = require("@remotion/media-utils");
  const mediabunny_1 = require("mediabunny");
  const react_1 = require("react");
+ const get_duration_or_compute_1 = require("./get-duration-or-compute");
  const cache = new Map();
  const getSrc = (s) => {
  if (s.type === 'video') {
@@ -26,8 +27,7 @@ const useMaxMediaDuration = (s, fps) => {
  formats: mediabunny_1.ALL_FORMATS,
  source: new mediabunny_1.UrlSource(src),
  });
- input
- .computeDuration()
+ (0, get_duration_or_compute_1.getDurationOrCompute)(input)
  .then((duration) => {
  cache.set(src, Math.floor(duration * fps));
  setMaxMediaDuration(Math.floor(duration * fps));
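On the consumer side, every call site in this diff makes the same one-line swap from `input.computeDuration()` to the new helper. A minimal usage sketch of the frame-count lookup that `useMaxMediaDuration` performs, assuming the helper above; the standalone function name is illustrative, while the mediabunny calls mirror the hunk:

```ts
import {ALL_FORMATS, Input, UrlSource} from 'mediabunny';
import {getDurationOrCompute} from './get-duration-or-compute';

// Illustrative standalone version of the lookup in the hunk above:
// resolve a media duration (metadata-first) and convert it to frames.
const getMaxMediaDurationInFrames = async (
	src: string,
	fps: number,
): Promise<number> => {
	const input = new Input({
		formats: ALL_FORMATS,
		source: new UrlSource(src),
	});
	const durationInSeconds = await getDurationOrCompute(input);
	return Math.floor(durationInSeconds * fps);
};
```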
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/studio"
  },
  "name": "@remotion/studio",
- "version": "4.0.453",
+ "version": "4.0.454",
  "description": "APIs for interacting with the Remotion Studio",
  "main": "dist",
  "sideEffects": false,
@@ -26,14 +26,14 @@
  },
  "dependencies": {
  "semver": "7.5.3",
- "remotion": "4.0.453",
- "@remotion/player": "4.0.453",
- "@remotion/media-utils": "4.0.453",
- "@remotion/renderer": "4.0.453",
- "@remotion/web-renderer": "4.0.453",
- "@remotion/studio-shared": "4.0.453",
- "@remotion/zod-types": "4.0.453",
- "mediabunny": "1.39.2",
+ "remotion": "4.0.454",
+ "@remotion/player": "4.0.454",
+ "@remotion/media-utils": "4.0.454",
+ "@remotion/renderer": "4.0.454",
+ "@remotion/web-renderer": "4.0.454",
+ "@remotion/studio-shared": "4.0.454",
+ "@remotion/zod-types": "4.0.454",
+ "mediabunny": "1.42.0",
  "memfs": "3.4.3",
  "source-map": "0.7.3",
  "open": "^8.4.2",
@@ -43,7 +43,7 @@
  "react": "19.2.3",
  "react-dom": "19.2.3",
  "@types/semver": "^7.3.4",
- "@remotion/eslint-config-internal": "4.0.453",
+ "@remotion/eslint-config-internal": "4.0.454",
  "eslint": "9.19.0",
  "@typescript/native-preview": "7.0.0-dev.20260217.1"
  },