@remotion/studio 4.0.325 → 4.0.329

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -22529,6 +22529,24 @@ var findTrackStartTimeInSeconds = ({
   }
   return dwellTime / movieTimeScale;
 };
+var findTrackMediaTimeOffsetInTrackTimescale = ({
+  trakBox
+}) => {
+  const elstBox = getElstBox(trakBox);
+  if (!elstBox) {
+    return 0;
+  }
+  const { entries } = elstBox;
+  let dwellTime = 0;
+  for (const entry of entries) {
+    const { mediaTime } = entry;
+    if (mediaTime === -1) {
+      continue;
+    }
+    dwellTime += mediaTime;
+  }
+  return dwellTime;
+};
 var WEBCODECS_TIMESCALE = 1e6;
 var MEDIA_PARSER_RIFF_TIMESCALE = 1e6;
 var isRiffAvi2 = (structure) => {
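The new helper walks a track's `elst` (edit list) box, skips entries whose `mediaTime` is -1, and sums the remaining `mediaTime` values, returning the total in the track's own timescale. A minimal hand-evaluated sketch of that behavior, using a hypothetical parsed edit list (the entry objects here are illustrative, not the package's exact box shape):

```js
// Hypothetical parsed elst box for illustration only.
const hypotheticalElst = {
  entries: [
    { mediaTime: -1 },  // empty edit: skipped
    { mediaTime: 3072 } // real edit: counted
  ]
};
let dwellTime = 0;
for (const { mediaTime } of hypotheticalElst.entries) {
  if (mediaTime === -1) continue;
  dwellTime += mediaTime;
}
console.log(dwellTime); // 3072, expressed in the track's timescale
```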
@@ -22580,7 +22598,8 @@ var makeAviAudioTrack = ({
     originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
     trackId: index,
     startInSeconds: 0,
-    timescale: WEBCODECS_TIMESCALE
+    timescale: WEBCODECS_TIMESCALE,
+    trackMediaTimeOffsetInTrackTimescale: 0
   };
 };
 var makeAviVideoTrack = ({
@@ -22625,7 +22644,8 @@ var makeAviVideoTrack = ({
     },
     fps: strh.rate / strh.scale,
     startInSeconds: 0,
-    timescale: WEBCODECS_TIMESCALE
+    timescale: WEBCODECS_TIMESCALE,
+    trackMediaTimeOffsetInTrackTimescale: 0
   };
 };
 var getTracksFromAvi = (structure, state) => {
@@ -23405,7 +23425,8 @@ var getTrack = ({
       codecEnum,
       fps: null,
       startInSeconds: 0,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: 0
     };
   }
   if (trackTypeToString(trackType2.value.value) === "audio") {
@@ -23429,7 +23450,8 @@ var getTrack = ({
         track
       }),
       startInSeconds: 0,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: 0
     };
   }
   return null;
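These hunks, and most of the track-construction hunks further down (AVC and AAC packets, MP3, FLAC, WAV), make the same mechanical change: every constructed track object gains a `trackMediaTimeOffsetInTrackTimescale` field, hard-coded to 0 for these non-ISO-BMFF code paths — presumably because only MP4-style files carry an `elst` edit list. A shape sketch of the affected fields, assuming nothing beyond what the diff shows:

```js
// Only the fields visible in this diff; everything else on the track is omitted.
const WEBCODECS_TIMESCALE = 1e6;
const track = {
  startInSeconds: 0,
  timescale: WEBCODECS_TIMESCALE,
  trackMediaTimeOffsetInTrackTimescale: 0 // no edit list in these containers
};
```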
@@ -24205,6 +24227,9 @@ var getVideoCodecString = (trakBox) => {
   }
   return videoSample.format;
 };
+var normalizeVideoRotation = (rotation) => {
+  return (rotation % 360 + 360) % 360;
+};
 var getActualDecoderParameters = ({
   audioCodec,
   codecPrivate: codecPrivate2,
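`normalizeVideoRotation` wraps any rotation value, including negative ones, into the range [0, 360). A few hand-evaluated inputs, with the helper copied verbatim from the hunk above:

```js
var normalizeVideoRotation = (rotation) => {
  return (rotation % 360 + 360) % 360;
};
normalizeVideoRotation(90);      // 90
normalizeVideoRotation(-90);     // 270
normalizeVideoRotation(450);     // 90
normalizeVideoRotation(0 - 270); // 90 — the `0 - rotation` form used in makeBaseMediaTrack below
```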
@@ -24316,7 +24341,10 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
       codecData: actual.codecPrivate,
       codecEnum,
       startInSeconds: startTimeInSeconds,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: findTrackMediaTimeOffsetInTrackTimescale({
+        trakBox
+      })
     };
   }
   if (!trakBoxContainsVideo(trakBox)) {
@@ -24326,7 +24354,10 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
       originalTimescale: timescaleAndDuration.timescale,
       trakBox,
       startInSeconds: startTimeInSeconds,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: findTrackMediaTimeOffsetInTrackTimescale({
+        trakBox
+      })
     };
   }
   const videoSample = getStsdVideoConfig(trakBox);
@@ -24368,14 +24399,17 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
     codedHeight: videoSample.height,
     displayAspectWidth,
     displayAspectHeight,
-    rotation,
+    rotation: normalizeVideoRotation(0 - rotation),
     codecData: privateData,
     colorSpace: mediaParserAdvancedColorToWebCodecsColor(advancedColor),
     advancedColor,
     codecEnum: getVideoCodecFromIsoTrak(trakBox),
     fps: getFpsFromMp4TrakBox(trakBox),
     startInSeconds: startTimeInSeconds,
-    timescale: WEBCODECS_TIMESCALE
+    timescale: WEBCODECS_TIMESCALE,
+    trackMediaTimeOffsetInTrackTimescale: findTrackMediaTimeOffsetInTrackTimescale({
+      trakBox
+    })
   };
   return track;
 };
@@ -29050,7 +29084,8 @@ var handleAvcPacket = async ({
     colorSpace: mediaParserAdvancedColorToWebCodecsColor(advancedColor),
     advancedColor,
     startInSeconds: 0,
-    timescale: WEBCODECS_TIMESCALE
+    timescale: WEBCODECS_TIMESCALE,
+    trackMediaTimeOffsetInTrackTimescale: 0
   };
   await registerVideoTrack({
     track,
@@ -30646,7 +30681,8 @@ var parseAac = async (state) => {
       trackId: 0,
       type: "audio",
       startInSeconds: 0,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: 0
     },
     registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
     tracks: state.callbacks.tracks,
@@ -31029,7 +31065,8 @@ var parseStreamInfo = async ({
       originalTimescale: WEBCODECS_TIMESCALE,
       trackId: 0,
       startInSeconds: 0,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: 0
     },
     registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
     tracks: state.callbacks.tracks,
@@ -31122,7 +31159,10 @@ var calculateFlatSamples = ({
   if (!moov) {
     throw new Error("No moov box found");
   }
-  const flatSamples = tracks2.map((track) => {
+  const offsets = [];
+  const trackIds = [];
+  const map = new Map;
+  for (const track of tracks2) {
     const trakBox = getTrakBoxByTrackId(moov, track.trackId);
     if (!trakBox) {
       throw new Error("No trak box found");
@@ -31133,14 +31173,17 @@ var calculateFlatSamples = ({
       moofComplete,
       trexBoxes: getTrexBoxes(moov)
     });
-    return samplePositions.map((samplePosition) => {
-      return {
+    trackIds.push(track.trackId);
+    for (const samplePosition of samplePositions) {
+      offsets.push(samplePosition.offset);
+      map.set(samplePosition.offset, {
         track,
         samplePosition
-      };
-    });
-  });
-  return flatSamples;
+      });
+    }
+  }
+  offsets.sort((a, b) => a - b);
+  return { flatSamples: map, offsets, trackIds };
 };
 var cachedSamplePositionsState = () => {
   const cachedForMdatStart = {};
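Instead of returning a nested array of `{ track, samplePosition }` entries per track, `calculateFlatSamples` now returns a `Map` keyed by each sample's byte offset, plus the sorted list of offsets and the track IDs, so callers can look up the sample at a given offset directly rather than scanning a flattened array. A minimal usage sketch of the new return shape (variable names other than those in the diff are assumptions):

```js
// Sketch only — mirrors the call site in parseMdatSection further below.
const { flatSamples, offsets, trackIds } = calculateFlatSamples({
  state,
  mediaSectionStart: mediaSection.start
});
// O(1) lookup of the sample that starts at a given byte offset:
const sample = flatSamples.get(currentByteOffset);
// `offsets` is sorted ascending, so the next sample past an arbitrary
// position can be found with a scan (or binary search):
const nextOffset = offsets.find((o) => o > currentByteOffset);
```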
@@ -31434,28 +31477,33 @@ var getKey = (samplePositionTrack) => {
   return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}`;
 };
 var findBestJump = ({
-  allSamplesSortedByOffset,
+  sampleMap,
+  offsetsSorted,
   visited,
   progresses
 }) => {
   const minProgress = Math.min(...Object.values(progresses));
   const trackNumberWithLowestProgress = Object.entries(progresses).find(([, progress]) => progress === minProgress)?.[0];
-  const firstSampleAboveMinProgress = allSamplesSortedByOffset.findIndex((sample) => sample.track.trackId === Number(trackNumberWithLowestProgress) && !visited.has(getKey(sample)));
+  const firstSampleAboveMinProgress = offsetsSorted.findIndex((offset) => sampleMap.get(offset).track.trackId === Number(trackNumberWithLowestProgress) && !visited.has(getKey(sampleMap.get(offset))));
   return firstSampleAboveMinProgress;
 };
-var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
+var calculateJumpMarks = ({
+  sampleMap,
+  offsetsSorted,
+  trackIds,
+  endOfMdat
+}) => {
   const progresses = {};
-  for (const track of samplePositionTracks) {
-    progresses[track[0].track.trackId] = 0;
+  for (const trackId of trackIds) {
+    progresses[trackId] = 0;
   }
   const jumpMarks = [];
-  const allSamplesSortedByOffset = samplePositionTracks.flat(1).filter((s) => s.track.type === "audio" || s.track.type === "video").sort((a, b) => a.samplePosition.offset - b.samplePosition.offset);
   let indexToVisit = 0;
   const visited = new Set;
   let rollOverToProcess = false;
   const increaseIndex = () => {
     indexToVisit++;
-    if (indexToVisit >= allSamplesSortedByOffset.length) {
+    if (indexToVisit >= offsetsSorted.length) {
       rollOverToProcess = true;
       indexToVisit = 0;
     }
@@ -31469,23 +31517,24 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
     }
     const jumpMark = {
       afterSampleWithOffset: lastVisitedSample.samplePosition.offset,
-      jumpToOffset: allSamplesSortedByOffset[firstSampleAboveMinProgress].samplePosition.offset
+      jumpToOffset: offsetsSorted[firstSampleAboveMinProgress]
     };
     indexToVisit = firstSampleAboveMinProgress;
     jumpMarks.push(jumpMark);
   };
   const addFinalJumpIfNecessary = () => {
-    if (indexToVisit === allSamplesSortedByOffset.length - 1) {
+    if (indexToVisit === offsetsSorted.length - 1) {
       return;
     }
     jumpMarks.push({
-      afterSampleWithOffset: allSamplesSortedByOffset[indexToVisit].samplePosition.offset,
+      afterSampleWithOffset: offsetsSorted[indexToVisit],
       jumpToOffset: endOfMdat
     });
   };
   const considerJump = () => {
     const firstSampleAboveMinProgress = findBestJump({
-      allSamplesSortedByOffset,
+      sampleMap,
+      offsetsSorted,
       visited,
       progresses
     });
@@ -31495,14 +31544,14 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
     } else {
       while (true) {
         increaseIndex();
-        if (!visited.has(getKey(allSamplesSortedByOffset[indexToVisit]))) {
+        if (!visited.has(getKey(sampleMap.get(offsetsSorted[indexToVisit])))) {
           break;
         }
       }
     }
   };
   while (true) {
-    const currentSamplePosition = allSamplesSortedByOffset[indexToVisit];
+    const currentSamplePosition = sampleMap.get(offsetsSorted[indexToVisit]);
     const sampleKey = getKey(currentSamplePosition);
     if (visited.has(sampleKey)) {
       considerJump();
@@ -31520,7 +31569,7 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
       rollOverToProcess = false;
     }
     lastVisitedSample = currentSamplePosition;
-    if (visited.size === allSamplesSortedByOffset.length) {
+    if (visited.size === offsetsSorted.length) {
       addFinalJumpIfNecessary();
       break;
     }
@@ -31530,7 +31579,7 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
     const maxProgress = Math.max(...progressValues);
     const minProgress = Math.min(...progressValues);
     const spread = maxProgress - minProgress;
-    if (visited.size === allSamplesSortedByOffset.length) {
+    if (visited.size === offsetsSorted.length) {
       addFinalJumpIfNecessary();
       break;
     }
@@ -31608,24 +31657,32 @@ var parseMdatSection = async (state) => {
     return parseMdatSection(state);
   }
   if (!state.iso.flatSamples.getSamples(mediaSection.start)) {
-    const flattedSamples = calculateFlatSamples({
+    const {
+      flatSamples: flatSamplesMap,
+      offsets,
+      trackIds
+    } = calculateFlatSamples({
       state,
       mediaSectionStart: mediaSection.start
     });
-    const calcedJumpMarks = calculateJumpMarks(flattedSamples, endOfMdat);
+    const calcedJumpMarks = calculateJumpMarks({
+      sampleMap: flatSamplesMap,
+      offsetsSorted: offsets,
+      trackIds,
+      endOfMdat
+    });
     state.iso.flatSamples.setJumpMarks(mediaSection.start, calcedJumpMarks);
-    state.iso.flatSamples.setSamples(mediaSection.start, flattedSamples.flat(1));
+    state.iso.flatSamples.setSamples(mediaSection.start, flatSamplesMap);
   }
   const flatSamples = state.iso.flatSamples.getSamples(mediaSection.start);
   const jumpMarks = state.iso.flatSamples.getJumpMarks(mediaSection.start);
   const { iterator } = state;
-  const samplesWithIndex = flatSamples.find((sample) => {
-    return sample.samplePosition.offset === iterator.counter.getOffset();
-  });
+  const samplesWithIndex = flatSamples.get(iterator.counter.getOffset());
   if (!samplesWithIndex) {
-    const nextSample_ = flatSamples.filter((s) => s.samplePosition.offset > iterator.counter.getOffset()).sort((a, b) => a.samplePosition.offset - b.samplePosition.offset)[0];
+    const offsets = Array.from(flatSamples.keys());
+    const nextSample_ = offsets.filter((s) => s > iterator.counter.getOffset()).sort((a, b) => a - b)[0];
     if (nextSample_) {
-      iterator.discard(nextSample_.samplePosition.offset - iterator.counter.getOffset());
+      iterator.discard(nextSample_ - iterator.counter.getOffset());
       return null;
     }
     Log.verbose(state.logLevel, "Could not find sample at offset", iterator.counter.getOffset(), "skipping to end of mdat");
@@ -31647,9 +31704,14 @@ var parseMdatSection = async (state) => {
     bigEndian,
     chunkSize
   } = samplesWithIndex.samplePosition;
-  const { originalTimescale, startInSeconds } = samplesWithIndex.track;
-  const cts = rawCts + startInSeconds * originalTimescale;
-  const dts = rawDts + startInSeconds * originalTimescale;
+  const {
+    originalTimescale,
+    startInSeconds,
+    trackMediaTimeOffsetInTrackTimescale,
+    timescale: trackTimescale
+  } = samplesWithIndex.track;
+  const cts = rawCts + startInSeconds * originalTimescale - trackMediaTimeOffsetInTrackTimescale / trackTimescale * WEBCODECS_TIMESCALE;
+  const dts = rawDts + startInSeconds * originalTimescale - trackMediaTimeOffsetInTrackTimescale / trackTimescale * WEBCODECS_TIMESCALE;
   const bytes = postprocessBytes({
     bytes: iterator.getSlice(samplesWithIndex.samplePosition.size),
     bigEndian,
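Composition and decoding timestamps are now shifted back by the edit-list offset: the new field is divided by the track's `timescale` and rescaled to `WEBCODECS_TIMESCALE`. A hand-worked example with made-up numbers, plugging into the formula exactly as it appears in the hunk above:

```js
// Illustrative values only; the formula is copied from the diff.
const WEBCODECS_TIMESCALE = 1e6;
const rawCts = 2000000;                            // sample CTS as read from the file
const startInSeconds = 0;
const originalTimescale = 90000;                   // unused here since startInSeconds is 0
const trackMediaTimeOffsetInTrackTimescale = 3003;
const trackTimescale = WEBCODECS_TIMESCALE;        // tracks in this diff set `timescale` to 1e6
const cts = rawCts + startInSeconds * originalTimescale
  - trackMediaTimeOffsetInTrackTimescale / trackTimescale * WEBCODECS_TIMESCALE;
console.log(cts); // 1996997 — the edit-list offset is subtracted from every timestamp
```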
@@ -33326,7 +33388,8 @@ var parseMpegHeader = async ({
       originalTimescale: 1e6,
       trackId: 0,
       startInSeconds: 0,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: 0
     },
     registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
     tracks: state.callbacks.tracks,
@@ -34363,7 +34426,8 @@ var handleAacPacket = async ({
     numberOfChannels: channelConfiguration,
     sampleRate,
     startInSeconds: 0,
-    timescale: WEBCODECS_TIMESCALE
+    timescale: WEBCODECS_TIMESCALE,
+    trackMediaTimeOffsetInTrackTimescale: 0
   };
   await registerAudioTrack({
     track,
@@ -34974,7 +35038,8 @@ var parseFmt = async ({
       originalTimescale: 1e6,
       trackId: 0,
       startInSeconds: 0,
-      timescale: WEBCODECS_TIMESCALE
+      timescale: WEBCODECS_TIMESCALE,
+      trackMediaTimeOffsetInTrackTimescale: 0
     },
     container: "wav",
     registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
@@ -35549,9 +35614,11 @@ var parseLoop = async ({
     try {
       await triggerInfoEmit(state);
       await state.controller._internals.checkForAbortAndPause();
+      const parseLoopStart = Date.now();
       const result = await runParseIteration({
         state
       });
+      state.timings.timeInParseLoop += Date.now() - parseLoopStart;
       if (result !== null && result.action === "fetch-more-data") {
         Log.verbose(state.logLevel, `Need to fetch ${result.bytesNeeded} more bytes before we can continue`);
         const startBytesRemaining = state.iterator.bytesRemaining();
@@ -35638,6 +35705,7 @@ var printTimings = (state) => {
   Log.verbose(state.logLevel, `Time seeking: ${state.timings.timeSeeking}ms`);
   Log.verbose(state.logLevel, `Time checking if done: ${state.timings.timeCheckingIfDone}ms`);
   Log.verbose(state.logLevel, `Time freeing data: ${state.timings.timeFreeingData}ms`);
+  Log.verbose(state.logLevel, `Time in parse loop: ${state.timings.timeInParseLoop}ms`);
 };
 var warningShown = false;
 var warnIfRemotionLicenseNotAcknowledged = ({
@@ -37170,7 +37238,8 @@ var timingsState = () => {
     timeReadingData: 0,
     timeSeeking: 0,
     timeCheckingIfDone: 0,
-    timeFreeingData: 0
+    timeFreeingData: 0,
+    timeInParseLoop: 0
   };
 };
 var lastEmittedSampleState = () => {
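The last three hunks wire up a new `timeInParseLoop` counter: `timingsState` initializes it to 0, `parseLoop` accumulates the wall-clock duration of each `runParseIteration` call, and `printTimings` logs it alongside the existing counters. The accumulation pattern, reduced to its core as a sketch (not the package's code):

```js
// Sketch of the timing pattern used above.
const timings = { timeInParseLoop: 0 };
async function timedIteration(run) {
  const start = Date.now();
  const result = await run();               // one parse iteration
  timings.timeInParseLoop += Date.now() - start;
  return result;
}
```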