@remotion/media-parser 4.0.295 → 4.0.296

This diff shows the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -27,12 +27,29 @@ const parseMdatSection = async (state) => {
         if (mfra) {
             const lastMoof = (0, last_moof_box_1.getLastMoofBox)(mfra);
             if (lastMoof && lastMoof > endOfMdat) {
-                log_1.Log.verbose(state.logLevel, 'Skipping to last moof', lastMoof, 'end of mdat', endOfMdat);
+                log_1.Log.verbose(state.logLevel, 'Skipping to last moof', lastMoof);
                 return (0, skip_1.makeSkip)(lastMoof);
             }
         }
         return (0, skip_1.makeSkip)(endOfMdat);
     }
+    // if we only need the first and last sample, we may skip over the samples in the middle
+    // this logic skips the samples in the middle for a fragmented mp4
+    if ((0, may_skip_video_data_1.maySkipOverSamplesInTheMiddle)({ state })) {
+        const mfra = state.iso.mfra.getIfAlreadyLoaded();
+        if (mfra) {
+            const lastMoof = (0, last_moof_box_1.getLastMoofBox)(mfra);
+            // we require that all moof boxes of both tracks have been processed, for correct duration calculation
+            const firstMax = (0, last_moof_box_1.getMaxFirstMoofOffset)(mfra);
+            const mediaSectionsBiggerThanMoof = state.mediaSection
+                .getMediaSections()
+                .filter((m) => m.start > firstMax).length;
+            if (mediaSectionsBiggerThanMoof > 1 && lastMoof && lastMoof > endOfMdat) {
+                log_1.Log.verbose(state.logLevel, 'Skipping to last moof because only first and last samples are needed');
+                return (0, skip_1.makeSkip)(lastMoof);
+            }
+        }
+    }
     const alreadyHasMoov = (0, get_tracks_1.getHasTracks)(state, true);
     if (!alreadyHasMoov) {
         const moov = await (0, get_moov_atom_1.getMoovAtom)({
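In plain terms: when the middle samples of a fragmented MP4 may be skipped, the parser still requires that more than one media section starts beyond every track's first moof before jumping ahead, so both tracks contribute a first fragment to the duration calculation. A minimal standalone sketch of that gate, with hypothetical simplified types standing in for the real parser state:

```ts
// Illustrative types; the real ParserState is far richer.
type MediaSection = {start: number};

const shouldJumpToLastMoof = ({
  lastMoofOffset, // byte offset of the last moof box, from the mfra index
  maxFirstMoofOffset, // largest "first moof" offset across all tracks
  endOfMdat, // byte offset where the current mdat ends
  mediaSections,
}: {
  lastMoofOffset: number | null;
  maxFirstMoofOffset: number;
  endOfMdat: number;
  mediaSections: MediaSection[];
}): boolean => {
  // More than one media section must start after every track's first moof,
  // mirroring the requirement that all tracks' first fragments were processed.
  const sectionsAfterFirstMoofs = mediaSections.filter(
    (m) => m.start > maxFirstMoofOffset,
  ).length;
  return (
    sectionsAfterFirstMoofs > 1 &&
    lastMoofOffset !== null &&
    lastMoofOffset > endOfMdat
  );
};

// Example: two fragments already seen, last moof lies beyond the current mdat.
console.log(
  shouldJumpToLastMoof({
    lastMoofOffset: 9_000_000,
    maxFirstMoofOffset: 1_000,
    endOfMdat: 2_000_000,
    mediaSections: [{start: 1_048}, {start: 2_096}],
  }),
); // true -> skip ahead to the last moof
```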
@@ -9320,6 +9320,18 @@ var needsToIterateOverSamples = ({
   const selectedKeys = keys.filter((k) => fields[k]);
   return selectedKeys.some((k) => fieldsNeedSamplesMap[k] && !emittedFields[k]);
 };
+var fieldsNeedEverySampleMap = {
+  ...fieldsNeedSamplesMap,
+  slowDurationInSeconds: false
+};
+var needsToIterateOverEverySample = ({
+  fields,
+  emittedFields
+}) => {
+  const keys = Object.keys(fields ?? {});
+  const selectedKeys = keys.filter((k) => fields[k]);
+  return selectedKeys.some((k) => fieldsNeedEverySampleMap[k] && !emittedFields[k]);
+};
 
 // src/disallow-forward-seek-if-samples-are-needed.ts
 var disallowForwardSeekIfSamplesAreNeeded = ({
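The new map piggybacks on fieldsNeedSamplesMap but opts slowDurationInSeconds out, since duration can be derived from the first and last sample alone. A self-contained sketch of the pattern, with a reduced field set for illustration:

```ts
// Three representative fields; the real map covers many more.
const fieldsNeedSamplesMap = {
  slowDurationInSeconds: true,
  slowFps: true,
  dimensions: false,
} as const;

type Field = keyof typeof fieldsNeedSamplesMap;

// Duration needs only the first and last sample, so it does not
// force iteration over every sample.
const fieldsNeedEverySampleMap: Record<Field, boolean> = {
  ...fieldsNeedSamplesMap,
  slowDurationInSeconds: false,
};

const needsEverySample = (
  fields: Partial<Record<Field, boolean>>,
  emitted: Partial<Record<Field, boolean>>,
): boolean =>
  (Object.keys(fields) as Field[])
    .filter((k) => fields[k])
    .some((k) => fieldsNeedEverySampleMap[k] && !emitted[k]);

console.log(needsEverySample({slowDurationInSeconds: true}, {})); // false
console.log(needsEverySample({slowFps: true}, {})); // true
```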
@@ -10015,11 +10027,26 @@ var emitAvailableInfo = async ({
 };
 
 // src/state/may-skip-video-data.ts
-var maySkipVideoData = ({ state }) => {
-  const hasAllTracksAndNoCallbacks = state.callbacks.tracks.hasAllTracks() && Object.values(state.callbacks.videoSampleCallbacks).length === 0 && Object.values(state.callbacks.audioSampleCallbacks).length === 0;
+var getHasCallbacks = (state) => {
   const hasNoTrackHandlers = !state.callbacks.hasAudioTrackHandlers && !state.callbacks.hasVideoTrackHandlers;
-  const noCallbacksNeeded = hasNoTrackHandlers || hasAllTracksAndNoCallbacks;
-  return noCallbacksNeeded && !needsToIterateOverSamples({
+  if (hasNoTrackHandlers) {
+    return false;
+  }
+  const hasAllTracksAndNoCallbacks = !state.callbacks.tracks.hasAllTracks() || Object.values(state.callbacks.videoSampleCallbacks).length > 0 || Object.values(state.callbacks.audioSampleCallbacks).length > 0;
+  return hasAllTracksAndNoCallbacks;
+};
+var maySkipVideoData = ({ state }) => {
+  const hasCallbacks = getHasCallbacks(state);
+  return !hasCallbacks && !needsToIterateOverSamples({
+    emittedFields: state.emittedFields,
+    fields: state.fields
+  });
+};
+var maySkipOverSamplesInTheMiddle = ({
+  state
+}) => {
+  const hasCallbacks = getHasCallbacks(state);
+  return !hasCallbacks && !needsToIterateOverEverySample({
     emittedFields: state.emittedFields,
     fields: state.fields
   });
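The refactored predicate inverts the old "no callbacks needed" logic: with no registered track handlers nothing can consume samples, and until all tracks are discovered the parser conservatively assumes sample callbacks may still be registered. A distilled sketch of that decision with an abbreviated, hypothetical state shape:

```ts
type CallbackState = {
  hasTrackHandlers: boolean; // an audio or video track handler is registered
  hasAllTracks: boolean; // all tracks have been discovered
  sampleCallbackCount: number; // registered audio + video sample callbacks
};

const getHasCallbacks = (s: CallbackState): boolean => {
  if (!s.hasTrackHandlers) {
    return false; // nobody is listening at all
  }
  // Conservative: while tracks are still being discovered, or while any
  // sample callback is registered, samples must be delivered.
  return !s.hasAllTracks || s.sampleCallbackCount > 0;
};

// Track handlers exist, all tracks known, no sample callbacks registered:
// no callbacks remain, so sample data may be skipped.
console.log(
  getHasCallbacks({
    hasTrackHandlers: true,
    hasAllTracks: true,
    sampleCallbackCount: 0,
  }),
); // false
```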
@@ -10780,6 +10807,13 @@ var getLastMoofBox = (boxes) => {
     return null;
   }
 };
+var getMaxFirstMoofOffset = (boxes) => {
+  const tfras = boxes.filter((b) => b.type === "tfra-box");
+  const firstMoofOffsets = tfras.map((f) => {
+    return f.entries[0].moofOffset;
+  });
+  return Math.max(...firstMoofOffsets.filter(truthy));
+};
 
 // src/state/can-skip-tracks.ts
 var needsTracksForField = ({
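For orientation: inside an mfra box, each track contributes a tfra (track fragment random access) box whose entries record moof byte offsets; getMaxFirstMoofOffset takes each track's first entry and returns the largest such offset. A sketch against simplified, hypothetical box types (the real IsoBaseMediaBox union is much larger):

```ts
type TfraEntry = {moofOffset: number};
type TfraBox = {type: 'tfra-box'; entries: TfraEntry[]};
type OtherBox = {type: 'mfro-box'};
type Box = TfraBox | OtherBox;

// Largest "first fragment" offset across all tracks: past this point,
// every track has had at least one moof processed.
const getMaxFirstMoofOffset = (boxes: Box[]): number => {
  const firstOffsets = boxes
    .filter((b): b is TfraBox => b.type === 'tfra-box')
    .map((t) => t.entries[0]?.moofOffset)
    .filter((o): o is number => Boolean(o)); // drop falsy offsets
  return Math.max(...firstOffsets);
};

const mfraChildren: Box[] = [
  {type: 'tfra-box', entries: [{moofOffset: 48}, {moofOffset: 4096}]}, // video
  {type: 'tfra-box', entries: [{moofOffset: 64}, {moofOffset: 5120}]}, // audio
  {type: 'mfro-box'},
];
console.log(getMaxFirstMoofOffset(mfraChildren)); // 64
```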
@@ -11181,12 +11215,24 @@ var parseMdatSection = async (state) => {
     if (mfra) {
       const lastMoof = getLastMoofBox(mfra);
       if (lastMoof && lastMoof > endOfMdat) {
-        Log.verbose(state.logLevel, "Skipping to last moof", lastMoof, "end of mdat", endOfMdat);
+        Log.verbose(state.logLevel, "Skipping to last moof", lastMoof);
         return makeSkip(lastMoof);
       }
     }
     return makeSkip(endOfMdat);
   }
+  if (maySkipOverSamplesInTheMiddle({ state })) {
+    const mfra = state.iso.mfra.getIfAlreadyLoaded();
+    if (mfra) {
+      const lastMoof = getLastMoofBox(mfra);
+      const firstMax = getMaxFirstMoofOffset(mfra);
+      const mediaSectionsBiggerThanMoof = state.mediaSection.getMediaSections().filter((m) => m.start > firstMax).length;
+      if (mediaSectionsBiggerThanMoof > 1 && lastMoof && lastMoof > endOfMdat) {
+        Log.verbose(state.logLevel, "Skipping to last moof because only first and last samples are needed");
+        return makeSkip(lastMoof);
+      }
+    }
+  }
   const alreadyHasMoov = getHasTracks(state, true);
   if (!alreadyHasMoov) {
     const moov = await getMoovAtom({
@@ -16411,29 +16457,25 @@ var samplesObservedState = () => {
   const videoSamples = new Map;
   const audioSamples = new Map;
   const getSlowVideoDurationInSeconds = () => {
-    let videoDuration = null;
-    if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
-      const startingTimestampDifference = largestVideoSample - smallestVideoSample;
-      const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
-      videoDuration = timeBetweenSamples * videoSamples.size;
-    }
-    return videoDuration;
+    return (largestVideoSample ?? 0) - (smallestVideoSample ?? 0);
+  };
+  const getSlowAudioDurationInSeconds = () => {
+    return (largestAudioSample ?? 0) - (smallestAudioSample ?? 0);
   };
   const getSlowDurationInSeconds = () => {
-    const videoDuration = getSlowVideoDurationInSeconds();
-    let audioDuration = null;
-    if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
-      const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
-      const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
-      audioDuration = timeBetweenSamplesAudio * audioSamples.size;
+    const smallestSample = Math.min(smallestAudioSample ?? Infinity, smallestVideoSample ?? Infinity);
+    const largestSample = Math.max(largestAudioSample ?? 0, largestVideoSample ?? 0);
+    if (smallestSample === Infinity || largestSample === Infinity) {
+      return 0;
     }
-    return Math.max(videoDuration ?? 0, audioDuration ?? 0);
+    return largestSample - smallestSample;
   };
   const addVideoSample = (videoSample) => {
     videoSamples.set(videoSample.cts, videoSample.data.byteLength);
     const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
+    const duration2 = (videoSample.duration ?? 0) / videoSample.timescale;
     if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
-      largestVideoSample = presentationTimeInSeconds;
+      largestVideoSample = presentationTimeInSeconds + duration2;
     }
     if (smallestVideoSample === undefined || presentationTimeInSeconds < smallestVideoSample) {
       smallestVideoSample = presentationTimeInSeconds;
@@ -16442,23 +16484,24 @@ var samplesObservedState = () => {
   const addAudioSample = (audioSample) => {
     audioSamples.set(audioSample.cts, audioSample.data.byteLength);
     const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
+    const duration2 = (audioSample.duration ?? 0) / audioSample.timescale;
     if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
-      largestAudioSample = presentationTimeInSeconds;
+      largestAudioSample = presentationTimeInSeconds + duration2;
     }
     if (smallestAudioSample === undefined || presentationTimeInSeconds < smallestAudioSample) {
       smallestAudioSample = presentationTimeInSeconds;
     }
   };
   const getFps2 = () => {
-    const videoDuration = getSlowVideoDurationInSeconds() ?? 0;
+    const videoDuration = (largestVideoSample ?? 0) - (smallestVideoSample ?? 0);
    if (videoDuration === 0) {
      return 0;
    }
-    return videoSamples.size / videoDuration;
+    return (videoSamples.size - 1) / videoDuration;
  };
  const getSlowNumberOfFrames = () => videoSamples.size;
  const getAudioBitrate = () => {
-    const audioDuration = getSlowDurationInSeconds();
+    const audioDuration = getSlowAudioDurationInSeconds();
    if (audioDuration === 0 || audioSamples.size === 0) {
      return null;
    }
@@ -16466,7 +16509,7 @@ var samplesObservedState = () => {
   return audioSizesInBytes * 8 / audioDuration;
 };
 const getVideoBitrate = () => {
-    const videoDuration = getSlowDurationInSeconds();
+    const videoDuration = getSlowVideoDurationInSeconds();
   if (videoDuration === 0 || videoSamples.size === 0) {
     return null;
   }
@@ -16486,7 +16529,8 @@ var samplesObservedState = () => {
     getAudioBitrate,
     getVideoBitrate,
     getLastSampleObserved,
-    setLastSampleObserved
+    setLastSampleObserved,
+    getAmountOfSamplesObserved: () => videoSamples.size + audioSamples.size
   };
 };
 
@@ -16977,7 +17021,7 @@ var downloadAndParseMedia = async (options) => {
   return returnValue;
 };
 // src/version.ts
-var VERSION = "4.0.295";
+var VERSION = "4.0.296";
 
 // src/index.ts
 var MediaParserInternals = {
@@ -7499,6 +7499,18 @@ var needsToIterateOverSamples = ({
   const selectedKeys = keys.filter((k) => fields[k]);
   return selectedKeys.some((k) => fieldsNeedSamplesMap[k] && !emittedFields[k]);
 };
+var fieldsNeedEverySampleMap = {
+  ...fieldsNeedSamplesMap,
+  slowDurationInSeconds: false
+};
+var needsToIterateOverEverySample = ({
+  fields,
+  emittedFields
+}) => {
+  const keys = Object.keys(fields ?? {});
+  const selectedKeys = keys.filter((k) => fields[k]);
+  return selectedKeys.some((k) => fieldsNeedEverySampleMap[k] && !emittedFields[k]);
+};
 
 // src/disallow-forward-seek-if-samples-are-needed.ts
 var disallowForwardSeekIfSamplesAreNeeded = ({
@@ -8194,11 +8206,26 @@ var emitAvailableInfo = async ({
 };
 
 // src/state/may-skip-video-data.ts
-var maySkipVideoData = ({ state }) => {
-  const hasAllTracksAndNoCallbacks = state.callbacks.tracks.hasAllTracks() && Object.values(state.callbacks.videoSampleCallbacks).length === 0 && Object.values(state.callbacks.audioSampleCallbacks).length === 0;
+var getHasCallbacks = (state) => {
   const hasNoTrackHandlers = !state.callbacks.hasAudioTrackHandlers && !state.callbacks.hasVideoTrackHandlers;
-  const noCallbacksNeeded = hasNoTrackHandlers || hasAllTracksAndNoCallbacks;
-  return noCallbacksNeeded && !needsToIterateOverSamples({
+  if (hasNoTrackHandlers) {
+    return false;
+  }
+  const hasAllTracksAndNoCallbacks = !state.callbacks.tracks.hasAllTracks() || Object.values(state.callbacks.videoSampleCallbacks).length > 0 || Object.values(state.callbacks.audioSampleCallbacks).length > 0;
+  return hasAllTracksAndNoCallbacks;
+};
+var maySkipVideoData = ({ state }) => {
+  const hasCallbacks = getHasCallbacks(state);
+  return !hasCallbacks && !needsToIterateOverSamples({
+    emittedFields: state.emittedFields,
+    fields: state.fields
+  });
+};
+var maySkipOverSamplesInTheMiddle = ({
+  state
+}) => {
+  const hasCallbacks = getHasCallbacks(state);
+  return !hasCallbacks && !needsToIterateOverEverySample({
     emittedFields: state.emittedFields,
     fields: state.fields
   });
@@ -8969,6 +8996,13 @@ var getLastMoofBox = (boxes) => {
     return null;
   }
 };
+var getMaxFirstMoofOffset = (boxes) => {
+  const tfras = boxes.filter((b) => b.type === "tfra-box");
+  const firstMoofOffsets = tfras.map((f) => {
+    return f.entries[0].moofOffset;
+  });
+  return Math.max(...firstMoofOffsets.filter(truthy));
+};
 
 // src/state/can-skip-tracks.ts
 var needsTracksForField = ({
@@ -11273,12 +11307,24 @@ var parseMdatSection = async (state) => {
     if (mfra) {
       const lastMoof = getLastMoofBox(mfra);
       if (lastMoof && lastMoof > endOfMdat) {
-        Log.verbose(state.logLevel, "Skipping to last moof", lastMoof, "end of mdat", endOfMdat);
+        Log.verbose(state.logLevel, "Skipping to last moof", lastMoof);
         return makeSkip(lastMoof);
       }
     }
     return makeSkip(endOfMdat);
   }
+  if (maySkipOverSamplesInTheMiddle({ state })) {
+    const mfra = state.iso.mfra.getIfAlreadyLoaded();
+    if (mfra) {
+      const lastMoof = getLastMoofBox(mfra);
+      const firstMax = getMaxFirstMoofOffset(mfra);
+      const mediaSectionsBiggerThanMoof = state.mediaSection.getMediaSections().filter((m) => m.start > firstMax).length;
+      if (mediaSectionsBiggerThanMoof > 1 && lastMoof && lastMoof > endOfMdat) {
+        Log.verbose(state.logLevel, "Skipping to last moof because only first and last samples are needed");
+        return makeSkip(lastMoof);
+      }
+    }
+  }
   const alreadyHasMoov = getHasTracks(state, true);
   if (!alreadyHasMoov) {
     const moov = await getMoovAtom({
@@ -16524,29 +16570,25 @@ var samplesObservedState = () => {
   const videoSamples = new Map;
   const audioSamples = new Map;
   const getSlowVideoDurationInSeconds = () => {
-    let videoDuration = null;
-    if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
-      const startingTimestampDifference = largestVideoSample - smallestVideoSample;
-      const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
-      videoDuration = timeBetweenSamples * videoSamples.size;
-    }
-    return videoDuration;
+    return (largestVideoSample ?? 0) - (smallestVideoSample ?? 0);
+  };
+  const getSlowAudioDurationInSeconds = () => {
+    return (largestAudioSample ?? 0) - (smallestAudioSample ?? 0);
   };
   const getSlowDurationInSeconds = () => {
-    const videoDuration = getSlowVideoDurationInSeconds();
-    let audioDuration = null;
-    if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
-      const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
-      const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
-      audioDuration = timeBetweenSamplesAudio * audioSamples.size;
+    const smallestSample = Math.min(smallestAudioSample ?? Infinity, smallestVideoSample ?? Infinity);
+    const largestSample = Math.max(largestAudioSample ?? 0, largestVideoSample ?? 0);
+    if (smallestSample === Infinity || largestSample === Infinity) {
+      return 0;
    }
-    return Math.max(videoDuration ?? 0, audioDuration ?? 0);
+    return largestSample - smallestSample;
  };
  const addVideoSample = (videoSample) => {
    videoSamples.set(videoSample.cts, videoSample.data.byteLength);
    const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
+    const duration2 = (videoSample.duration ?? 0) / videoSample.timescale;
    if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
-      largestVideoSample = presentationTimeInSeconds;
+      largestVideoSample = presentationTimeInSeconds + duration2;
    }
    if (smallestVideoSample === undefined || presentationTimeInSeconds < smallestVideoSample) {
      smallestVideoSample = presentationTimeInSeconds;
@@ -16555,23 +16597,24 @@ var samplesObservedState = () => {
   const addAudioSample = (audioSample) => {
     audioSamples.set(audioSample.cts, audioSample.data.byteLength);
     const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
+    const duration2 = (audioSample.duration ?? 0) / audioSample.timescale;
     if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
-      largestAudioSample = presentationTimeInSeconds;
+      largestAudioSample = presentationTimeInSeconds + duration2;
     }
     if (smallestAudioSample === undefined || presentationTimeInSeconds < smallestAudioSample) {
       smallestAudioSample = presentationTimeInSeconds;
     }
   };
   const getFps2 = () => {
-    const videoDuration = getSlowVideoDurationInSeconds() ?? 0;
+    const videoDuration = (largestVideoSample ?? 0) - (smallestVideoSample ?? 0);
    if (videoDuration === 0) {
      return 0;
    }
-    return videoSamples.size / videoDuration;
+    return (videoSamples.size - 1) / videoDuration;
  };
  const getSlowNumberOfFrames = () => videoSamples.size;
  const getAudioBitrate = () => {
-    const audioDuration = getSlowDurationInSeconds();
+    const audioDuration = getSlowAudioDurationInSeconds();
    if (audioDuration === 0 || audioSamples.size === 0) {
      return null;
    }
@@ -16579,7 +16622,7 @@ var samplesObservedState = () => {
   return audioSizesInBytes * 8 / audioDuration;
 };
 const getVideoBitrate = () => {
-    const videoDuration = getSlowDurationInSeconds();
+    const videoDuration = getSlowVideoDurationInSeconds();
   if (videoDuration === 0 || videoSamples.size === 0) {
     return null;
   }
@@ -16599,7 +16642,8 @@ var samplesObservedState = () => {
     getAudioBitrate,
     getVideoBitrate,
     getLastSampleObserved,
-    setLastSampleObserved
+    setLastSampleObserved,
+    getAmountOfSamplesObserved: () => videoSamples.size + audioSamples.size
   };
 };
 
@@ -7411,6 +7411,18 @@ var needsToIterateOverSamples = ({
   const selectedKeys = keys.filter((k) => fields[k]);
   return selectedKeys.some((k) => fieldsNeedSamplesMap[k] && !emittedFields[k]);
 };
+var fieldsNeedEverySampleMap = {
+  ...fieldsNeedSamplesMap,
+  slowDurationInSeconds: false
+};
+var needsToIterateOverEverySample = ({
+  fields,
+  emittedFields
+}) => {
+  const keys = Object.keys(fields ?? {});
+  const selectedKeys = keys.filter((k) => fields[k]);
+  return selectedKeys.some((k) => fieldsNeedEverySampleMap[k] && !emittedFields[k]);
+};
 
 // src/disallow-forward-seek-if-samples-are-needed.ts
 var disallowForwardSeekIfSamplesAreNeeded = ({
@@ -8106,11 +8118,26 @@ var emitAvailableInfo = async ({
 };
 
 // src/state/may-skip-video-data.ts
-var maySkipVideoData = ({ state }) => {
-  const hasAllTracksAndNoCallbacks = state.callbacks.tracks.hasAllTracks() && Object.values(state.callbacks.videoSampleCallbacks).length === 0 && Object.values(state.callbacks.audioSampleCallbacks).length === 0;
+var getHasCallbacks = (state) => {
   const hasNoTrackHandlers = !state.callbacks.hasAudioTrackHandlers && !state.callbacks.hasVideoTrackHandlers;
-  const noCallbacksNeeded = hasNoTrackHandlers || hasAllTracksAndNoCallbacks;
-  return noCallbacksNeeded && !needsToIterateOverSamples({
+  if (hasNoTrackHandlers) {
+    return false;
+  }
+  const hasAllTracksAndNoCallbacks = !state.callbacks.tracks.hasAllTracks() || Object.values(state.callbacks.videoSampleCallbacks).length > 0 || Object.values(state.callbacks.audioSampleCallbacks).length > 0;
+  return hasAllTracksAndNoCallbacks;
+};
+var maySkipVideoData = ({ state }) => {
+  const hasCallbacks = getHasCallbacks(state);
+  return !hasCallbacks && !needsToIterateOverSamples({
+    emittedFields: state.emittedFields,
+    fields: state.fields
+  });
+};
+var maySkipOverSamplesInTheMiddle = ({
+  state
+}) => {
+  const hasCallbacks = getHasCallbacks(state);
+  return !hasCallbacks && !needsToIterateOverEverySample({
     emittedFields: state.emittedFields,
     fields: state.fields
   });
@@ -8881,6 +8908,13 @@ var getLastMoofBox = (boxes) => {
     return null;
   }
 };
+var getMaxFirstMoofOffset = (boxes) => {
+  const tfras = boxes.filter((b) => b.type === "tfra-box");
+  const firstMoofOffsets = tfras.map((f) => {
+    return f.entries[0].moofOffset;
+  });
+  return Math.max(...firstMoofOffsets.filter(truthy));
+};
 
 // src/state/can-skip-tracks.ts
 var needsTracksForField = ({
@@ -11185,12 +11219,24 @@ var parseMdatSection = async (state) => {
     if (mfra) {
       const lastMoof = getLastMoofBox(mfra);
       if (lastMoof && lastMoof > endOfMdat) {
-        Log.verbose(state.logLevel, "Skipping to last moof", lastMoof, "end of mdat", endOfMdat);
+        Log.verbose(state.logLevel, "Skipping to last moof", lastMoof);
         return makeSkip(lastMoof);
       }
     }
     return makeSkip(endOfMdat);
   }
+  if (maySkipOverSamplesInTheMiddle({ state })) {
+    const mfra = state.iso.mfra.getIfAlreadyLoaded();
+    if (mfra) {
+      const lastMoof = getLastMoofBox(mfra);
+      const firstMax = getMaxFirstMoofOffset(mfra);
+      const mediaSectionsBiggerThanMoof = state.mediaSection.getMediaSections().filter((m) => m.start > firstMax).length;
+      if (mediaSectionsBiggerThanMoof > 1 && lastMoof && lastMoof > endOfMdat) {
+        Log.verbose(state.logLevel, "Skipping to last moof because only first and last samples are needed");
+        return makeSkip(lastMoof);
+      }
+    }
+  }
   const alreadyHasMoov = getHasTracks(state, true);
   if (!alreadyHasMoov) {
     const moov = await getMoovAtom({
@@ -16408,29 +16454,25 @@ var samplesObservedState = () => {
   const videoSamples = new Map;
   const audioSamples = new Map;
   const getSlowVideoDurationInSeconds = () => {
-    let videoDuration = null;
-    if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
-      const startingTimestampDifference = largestVideoSample - smallestVideoSample;
-      const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
-      videoDuration = timeBetweenSamples * videoSamples.size;
-    }
-    return videoDuration;
+    return (largestVideoSample ?? 0) - (smallestVideoSample ?? 0);
+  };
+  const getSlowAudioDurationInSeconds = () => {
+    return (largestAudioSample ?? 0) - (smallestAudioSample ?? 0);
   };
   const getSlowDurationInSeconds = () => {
-    const videoDuration = getSlowVideoDurationInSeconds();
-    let audioDuration = null;
-    if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
-      const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
-      const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
-      audioDuration = timeBetweenSamplesAudio * audioSamples.size;
+    const smallestSample = Math.min(smallestAudioSample ?? Infinity, smallestVideoSample ?? Infinity);
+    const largestSample = Math.max(largestAudioSample ?? 0, largestVideoSample ?? 0);
+    if (smallestSample === Infinity || largestSample === Infinity) {
+      return 0;
    }
-    return Math.max(videoDuration ?? 0, audioDuration ?? 0);
+    return largestSample - smallestSample;
  };
  const addVideoSample = (videoSample) => {
    videoSamples.set(videoSample.cts, videoSample.data.byteLength);
    const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
+    const duration2 = (videoSample.duration ?? 0) / videoSample.timescale;
    if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
-      largestVideoSample = presentationTimeInSeconds;
+      largestVideoSample = presentationTimeInSeconds + duration2;
    }
    if (smallestVideoSample === undefined || presentationTimeInSeconds < smallestVideoSample) {
      smallestVideoSample = presentationTimeInSeconds;
@@ -16439,23 +16481,24 @@ var samplesObservedState = () => {
   const addAudioSample = (audioSample) => {
     audioSamples.set(audioSample.cts, audioSample.data.byteLength);
     const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
+    const duration2 = (audioSample.duration ?? 0) / audioSample.timescale;
     if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
-      largestAudioSample = presentationTimeInSeconds;
+      largestAudioSample = presentationTimeInSeconds + duration2;
     }
     if (smallestAudioSample === undefined || presentationTimeInSeconds < smallestAudioSample) {
       smallestAudioSample = presentationTimeInSeconds;
     }
   };
   const getFps2 = () => {
-    const videoDuration = getSlowVideoDurationInSeconds() ?? 0;
+    const videoDuration = (largestVideoSample ?? 0) - (smallestVideoSample ?? 0);
    if (videoDuration === 0) {
      return 0;
    }
-    return videoSamples.size / videoDuration;
+    return (videoSamples.size - 1) / videoDuration;
  };
  const getSlowNumberOfFrames = () => videoSamples.size;
  const getAudioBitrate = () => {
-    const audioDuration = getSlowDurationInSeconds();
+    const audioDuration = getSlowAudioDurationInSeconds();
    if (audioDuration === 0 || audioSamples.size === 0) {
      return null;
    }
@@ -16463,7 +16506,7 @@ var samplesObservedState = () => {
   return audioSizesInBytes * 8 / audioDuration;
 };
 const getVideoBitrate = () => {
-    const videoDuration = getSlowDurationInSeconds();
+    const videoDuration = getSlowVideoDurationInSeconds();
   if (videoDuration === 0 || videoSamples.size === 0) {
     return null;
   }
@@ -16483,7 +16526,8 @@ var samplesObservedState = () => {
     getAudioBitrate,
     getVideoBitrate,
     getLastSampleObserved,
-    setLastSampleObserved
+    setLastSampleObserved,
+    getAmountOfSamplesObserved: () => videoSamples.size + audioSamples.size
   };
 };
 
package/dist/index.d.ts CHANGED
@@ -1039,6 +1039,7 @@ export declare const MediaParserInternals: {
     getVideoBitrate: () => number | null;
     getLastSampleObserved: () => boolean;
     setLastSampleObserved: () => void;
+    getAmountOfSamplesObserved: () => number;
 };
 contentLength: number;
 images: {
@@ -1,2 +1,3 @@
 import type { IsoBaseMediaBox } from '../../containers/iso-base-media/base-media-box';
 export declare const getLastMoofBox: (boxes: IsoBaseMediaBox[]) => number | null | undefined;
+export declare const getMaxFirstMoofOffset: (boxes: IsoBaseMediaBox[]) => number;
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getLastMoofBox = void 0;
+exports.getMaxFirstMoofOffset = exports.getLastMoofBox = void 0;
 const truthy_1 = require("../../truthy");
 const getLastMoofBox = (boxes) => {
     if (boxes) {
@@ -19,3 +19,11 @@ const getLastMoofBox = (boxes) => {
     }
 };
 exports.getLastMoofBox = getLastMoofBox;
+const getMaxFirstMoofOffset = (boxes) => {
+    const tfras = boxes.filter((b) => b.type === 'tfra-box');
+    const firstMoofOffsets = tfras.map((f) => {
+        return f.entries[0].moofOffset;
+    });
+    return Math.max(...firstMoofOffsets.filter(truthy_1.truthy));
+};
+exports.getMaxFirstMoofOffset = getMaxFirstMoofOffset;
@@ -2,3 +2,6 @@ import type { ParserState } from './parser-state';
 export declare const maySkipVideoData: ({ state }: {
     state: ParserState;
 }) => boolean;
+export declare const maySkipOverSamplesInTheMiddle: ({ state, }: {
+    state: ParserState;
+}) => boolean;
@@ -1,18 +1,33 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.maySkipVideoData = void 0;
+exports.maySkipOverSamplesInTheMiddle = exports.maySkipVideoData = void 0;
 const need_samples_for_fields_1 = require("./need-samples-for-fields");
-const maySkipVideoData = ({ state }) => {
-    const hasAllTracksAndNoCallbacks = state.callbacks.tracks.hasAllTracks() &&
-        Object.values(state.callbacks.videoSampleCallbacks).length === 0 &&
-        Object.values(state.callbacks.audioSampleCallbacks).length === 0;
+const getHasCallbacks = (state) => {
     const hasNoTrackHandlers = !state.callbacks.hasAudioTrackHandlers &&
         !state.callbacks.hasVideoTrackHandlers;
-    const noCallbacksNeeded = hasNoTrackHandlers || hasAllTracksAndNoCallbacks;
-    return (noCallbacksNeeded &&
+    if (hasNoTrackHandlers) {
+        return false;
+    }
+    const hasAllTracksAndNoCallbacks = !state.callbacks.tracks.hasAllTracks() ||
+        Object.values(state.callbacks.videoSampleCallbacks).length > 0 ||
+        Object.values(state.callbacks.audioSampleCallbacks).length > 0;
+    return hasAllTracksAndNoCallbacks;
+};
+const maySkipVideoData = ({ state }) => {
+    const hasCallbacks = getHasCallbacks(state);
+    return (!hasCallbacks &&
         !(0, need_samples_for_fields_1.needsToIterateOverSamples)({
             emittedFields: state.emittedFields,
             fields: state.fields,
         }));
 };
 exports.maySkipVideoData = maySkipVideoData;
+const maySkipOverSamplesInTheMiddle = ({ state, }) => {
+    const hasCallbacks = getHasCallbacks(state);
+    return (!hasCallbacks &&
+        !(0, need_samples_for_fields_1.needsToIterateOverEverySample)({
+            emittedFields: state.emittedFields,
+            fields: state.fields,
+        }));
+};
+exports.maySkipOverSamplesInTheMiddle = maySkipOverSamplesInTheMiddle;
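Downstream, this predicate lets metadata-only queries on fragmented MP4s finish without reading the middle fragments. A hedged usage sketch against the package's documented parseMedia API (the URL is a placeholder):

```ts
import {parseMedia} from '@remotion/media-parser';

// Only slowDurationInSeconds is requested and no sample callbacks are
// registered, so maySkipOverSamplesInTheMiddle() can return true and the
// parser may jump from the first fragment straight to the last moof
// instead of iterating over every sample in between.
const {slowDurationInSeconds} = await parseMedia({
  src: 'https://example.com/fragmented.mp4', // placeholder URL
  fields: {slowDurationInSeconds: true},
});

console.log(slowDurationInSeconds);
```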
@@ -4,3 +4,7 @@ export declare const needsToIterateOverSamples: ({ fields, emittedFields, }: {
     fields: Options<ParseMediaFields>;
     emittedFields: AllOptions<ParseMediaFields>;
 }) => boolean;
+export declare const needsToIterateOverEverySample: ({ fields, emittedFields, }: {
+    fields: Options<ParseMediaFields>;
+    emittedFields: AllOptions<ParseMediaFields>;
+}) => boolean;
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.needsToIterateOverSamples = exports.fieldsNeedSamplesMap = void 0;
+exports.needsToIterateOverEverySample = exports.needsToIterateOverSamples = exports.fieldsNeedSamplesMap = void 0;
 exports.fieldsNeedSamplesMap = {
     slowDurationInSeconds: true,
     slowFps: true,
@@ -37,3 +37,14 @@ const needsToIterateOverSamples = ({ fields, emittedFields, }) => {
     return selectedKeys.some((k) => exports.fieldsNeedSamplesMap[k] && !emittedFields[k]);
 };
 exports.needsToIterateOverSamples = needsToIterateOverSamples;
+// For duration, we only need the first and last sample
+const fieldsNeedEverySampleMap = {
+    ...exports.fieldsNeedSamplesMap,
+    slowDurationInSeconds: false,
+};
+const needsToIterateOverEverySample = ({ fields, emittedFields, }) => {
+    const keys = Object.keys(fields !== null && fields !== void 0 ? fields : {});
+    const selectedKeys = keys.filter((k) => fields[k]);
+    return selectedKeys.some((k) => fieldsNeedEverySampleMap[k] && !emittedFields[k]);
+};
+exports.needsToIterateOverEverySample = needsToIterateOverEverySample;
@@ -316,6 +316,7 @@ export declare const makeParserState: ({ hasAudioTrackHandlers, hasVideoTrackHan
     getVideoBitrate: () => number | null;
     getLastSampleObserved: () => boolean;
     setLastSampleObserved: () => void;
+    getAmountOfSamplesObserved: () => number;
 };
 contentLength: number;
 images: {
@@ -9,5 +9,6 @@ export declare const samplesObservedState: () => {
     getVideoBitrate: () => number | null;
     getLastSampleObserved: () => boolean;
     setLastSampleObserved: () => void;
+    getAmountOfSamplesObserved: () => number;
 };
 export type SamplesObservedState = ReturnType<typeof samplesObservedState>;
@@ -10,30 +10,27 @@ const samplesObservedState = () => {
     const videoSamples = new Map();
     const audioSamples = new Map();
     const getSlowVideoDurationInSeconds = () => {
-        let videoDuration = null;
-        if (smallestVideoSample !== undefined && largestVideoSample !== undefined) {
-            const startingTimestampDifference = largestVideoSample - smallestVideoSample;
-            const timeBetweenSamples = startingTimestampDifference / (videoSamples.size - 1);
-            videoDuration = timeBetweenSamples * videoSamples.size;
-        }
-        return videoDuration;
+        return (largestVideoSample !== null && largestVideoSample !== void 0 ? largestVideoSample : 0) - (smallestVideoSample !== null && smallestVideoSample !== void 0 ? smallestVideoSample : 0);
+    };
+    const getSlowAudioDurationInSeconds = () => {
+        return (largestAudioSample !== null && largestAudioSample !== void 0 ? largestAudioSample : 0) - (smallestAudioSample !== null && smallestAudioSample !== void 0 ? smallestAudioSample : 0);
     };
     const getSlowDurationInSeconds = () => {
-        const videoDuration = getSlowVideoDurationInSeconds();
-        let audioDuration = null;
-        if (smallestAudioSample !== undefined && largestAudioSample !== undefined) {
-            const startingTimestampDifferenceAudio = largestAudioSample - smallestAudioSample;
-            const timeBetweenSamplesAudio = startingTimestampDifferenceAudio / (audioSamples.size - 1);
-            audioDuration = timeBetweenSamplesAudio * audioSamples.size;
+        const smallestSample = Math.min(smallestAudioSample !== null && smallestAudioSample !== void 0 ? smallestAudioSample : Infinity, smallestVideoSample !== null && smallestVideoSample !== void 0 ? smallestVideoSample : Infinity);
+        const largestSample = Math.max(largestAudioSample !== null && largestAudioSample !== void 0 ? largestAudioSample : 0, largestVideoSample !== null && largestVideoSample !== void 0 ? largestVideoSample : 0);
+        if (smallestSample === Infinity || largestSample === Infinity) {
+            return 0;
         }
-        return Math.max(videoDuration !== null && videoDuration !== void 0 ? videoDuration : 0, audioDuration !== null && audioDuration !== void 0 ? audioDuration : 0);
+        return largestSample - smallestSample;
     };
     const addVideoSample = (videoSample) => {
+        var _a;
         videoSamples.set(videoSample.cts, videoSample.data.byteLength);
         const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
+        const duration = ((_a = videoSample.duration) !== null && _a !== void 0 ? _a : 0) / videoSample.timescale;
         if (largestVideoSample === undefined ||
             presentationTimeInSeconds > largestVideoSample) {
-            largestVideoSample = presentationTimeInSeconds;
+            largestVideoSample = presentationTimeInSeconds + duration;
         }
         if (smallestVideoSample === undefined ||
             presentationTimeInSeconds < smallestVideoSample) {
@@ -41,11 +38,13 @@ const samplesObservedState = () => {
         }
     };
     const addAudioSample = (audioSample) => {
+        var _a;
         audioSamples.set(audioSample.cts, audioSample.data.byteLength);
         const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
+        const duration = ((_a = audioSample.duration) !== null && _a !== void 0 ? _a : 0) / audioSample.timescale;
         if (largestAudioSample === undefined ||
             presentationTimeInSeconds > largestAudioSample) {
-            largestAudioSample = presentationTimeInSeconds;
+            largestAudioSample = presentationTimeInSeconds + duration;
         }
         if (smallestAudioSample === undefined ||
             presentationTimeInSeconds < smallestAudioSample) {
@@ -53,16 +52,15 @@ const samplesObservedState = () => {
         }
     };
     const getFps = () => {
-        var _a;
-        const videoDuration = (_a = getSlowVideoDurationInSeconds()) !== null && _a !== void 0 ? _a : 0;
+        const videoDuration = (largestVideoSample !== null && largestVideoSample !== void 0 ? largestVideoSample : 0) - (smallestVideoSample !== null && smallestVideoSample !== void 0 ? smallestVideoSample : 0);
         if (videoDuration === 0) {
             return 0;
         }
-        return videoSamples.size / videoDuration;
+        return (videoSamples.size - 1) / videoDuration;
     };
     const getSlowNumberOfFrames = () => videoSamples.size;
     const getAudioBitrate = () => {
-        const audioDuration = getSlowDurationInSeconds();
+        const audioDuration = getSlowAudioDurationInSeconds();
         if (audioDuration === 0 || audioSamples.size === 0) {
             return null;
         }
@@ -70,7 +68,7 @@ const samplesObservedState = () => {
         return (audioSizesInBytes * 8) / audioDuration;
     };
     const getVideoBitrate = () => {
-        const videoDuration = getSlowDurationInSeconds();
+        const videoDuration = getSlowVideoDurationInSeconds();
         if (videoDuration === 0 || videoSamples.size === 0) {
             return null;
         }
@@ -91,6 +89,7 @@ const samplesObservedState = () => {
         getVideoBitrate,
         getLastSampleObserved,
         setLastSampleObserved,
+        getAmountOfSamplesObserved: () => videoSamples.size + audioSamples.size,
     };
 };
 exports.samplesObservedState = samplesObservedState;
package/dist/version.d.ts CHANGED
@@ -1 +1 @@
-export declare const VERSION = "4.0.295";
+export declare const VERSION = "4.0.296";
package/dist/version.js CHANGED
@@ -2,4 +2,4 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.VERSION = void 0;
 // Automatically generated on publish
-exports.VERSION = '4.0.295';
+exports.VERSION = '4.0.296';
package/package.json CHANGED
@@ -3,15 +3,15 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-parser"
   },
   "name": "@remotion/media-parser",
-  "version": "4.0.295",
+  "version": "4.0.296",
   "main": "dist/index.js",
   "sideEffects": false,
   "devDependencies": {
     "@types/wicg-file-system-access": "2023.10.5",
     "eslint": "9.19.0",
     "@types/bun": "1.2.8",
-    "@remotion/example-videos": "4.0.295",
-    "@remotion/eslint-config-internal": "4.0.295"
+    "@remotion/example-videos": "4.0.296",
+    "@remotion/eslint-config-internal": "4.0.296"
   },
   "publishConfig": {
     "access": "public"