mediabunny 1.0.4 → 1.0.6

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
@@ -6224,6 +6224,9 @@ ${cue.notes ?? ""}`;
6224
6224
  return null;
6225
6225
  }
6226
6226
  reader.pos += 1;
6227
+ if (firstByte !== 255) {
6228
+ return null;
6229
+ }
6227
6230
  if ((secondByte & 224) !== 224) {
6228
6231
  return null;
6229
6232
  }
@@ -11823,6 +11826,7 @@ ${cue.notes ?? ""}`;
11823
11826
  fragmentLookupTable: null,
11824
11827
  currentFragmentState: null,
11825
11828
  fragments: [],
11829
+ fragmentsWithKeyFrame: [],
11826
11830
  editListPreviousSegmentDurations: 0,
11827
11831
  editListOffset: 0
11828
11832
  };
@@ -11901,14 +11905,18 @@ ${cue.notes ?? ""}`;
11901
11905
  continue;
11902
11906
  }
11903
11907
  if (relevantEntryFound) {
11904
- throw new Error("Unsupported edit list: multiple edits are not supported.");
11908
+ console.warn(
11909
+ "Unsupported edit list: multiple edits are not currently supported. Only using first edit."
11910
+ );
11911
+ break;
11905
11912
  }
11906
11913
  if (mediaTime === -1) {
11907
11914
  previousSegmentDurations += segmentDuration;
11908
11915
  continue;
11909
11916
  }
11910
11917
  if (mediaRate !== 1) {
11911
- throw new Error("Unsupported edit list: media rate must be 1.");
11918
+ console.warn("Unsupported edit list entry: media rate must be 1.");
11919
+ break;
11912
11920
  }
11913
11921
  track.editListPreviousSegmentDurations = previousSegmentDurations;
11914
11922
  track.editListOffset = mediaTime;
@@ -12096,7 +12104,8 @@ ${cue.notes ?? ""}`;
12096
12104
  } else if (sampleSize === 16) {
12097
12105
  track.info.codec = "pcm-s16be";
12098
12106
  } else {
12099
- throw new Error(`Unsupported sample size ${sampleSize} for codec 'twos'.`);
12107
+ console.warn(`Unsupported sample size ${sampleSize} for codec 'twos'.`);
12108
+ track.info.codec = null;
12100
12109
  }
12101
12110
  } else if (lowercaseBoxName === "sowt") {
12102
12111
  if (sampleSize === 8) {
@@ -12104,7 +12113,8 @@ ${cue.notes ?? ""}`;
12104
12113
  } else if (sampleSize === 16) {
12105
12114
  track.info.codec = "pcm-s16";
12106
12115
  } else {
12107
- throw new Error(`Unsupported sample size ${sampleSize} for codec 'sowt'.`);
12116
+ console.warn(`Unsupported sample size ${sampleSize} for codec 'sowt'.`);
12117
+ track.info.codec = null;
12108
12118
  }
12109
12119
  } else if (lowercaseBoxName === "raw ") {
12110
12120
  track.info.codec = "pcm-u8";
@@ -12324,7 +12334,8 @@ ${cue.notes ?? ""}`;
12324
12334
  } else if (pcmSampleSize === 32) {
12325
12335
  track.info.codec = "pcm-s32";
12326
12336
  } else {
12327
- throw new Error(`Invalid ipcm sample size ${pcmSampleSize}.`);
12337
+ console.warn(`Invalid ipcm sample size ${pcmSampleSize}.`);
12338
+ track.info.codec = null;
12328
12339
  }
12329
12340
  } else {
12330
12341
  if (pcmSampleSize === 16) {
@@ -12334,7 +12345,8 @@ ${cue.notes ?? ""}`;
12334
12345
  } else if (pcmSampleSize === 32) {
12335
12346
  track.info.codec = "pcm-s32be";
12336
12347
  } else {
12337
- throw new Error(`Invalid ipcm sample size ${pcmSampleSize}.`);
12348
+ console.warn(`Invalid ipcm sample size ${pcmSampleSize}.`);
12349
+ track.info.codec = null;
12338
12350
  }
12339
12351
  }
12340
12352
  } else if (track.info.codec === "pcm-f32be") {
@@ -12344,7 +12356,8 @@ ${cue.notes ?? ""}`;
12344
12356
  } else if (pcmSampleSize === 64) {
12345
12357
  track.info.codec = "pcm-f64";
12346
12358
  } else {
12347
- throw new Error(`Invalid fpcm sample size ${pcmSampleSize}.`);
12359
+ console.warn(`Invalid fpcm sample size ${pcmSampleSize}.`);
12360
+ track.info.codec = null;
12348
12361
  }
12349
12362
  } else {
12350
12363
  if (pcmSampleSize === 32) {
@@ -12352,7 +12365,8 @@ ${cue.notes ?? ""}`;
12352
12365
  } else if (pcmSampleSize === 64) {
12353
12366
  track.info.codec = "pcm-f64be";
12354
12367
  } else {
12355
- throw new Error(`Invalid fpcm sample size ${pcmSampleSize}.`);
12368
+ console.warn(`Invalid fpcm sample size ${pcmSampleSize}.`);
12369
+ track.info.codec = null;
12356
12370
  }
12357
12371
  }
12358
12372
  }
@@ -12502,9 +12516,24 @@ ${cue.notes ?? ""}`;
12502
12516
  break;
12503
12517
  case "stz2":
12504
12518
  {
12505
- throw new Error("Unsupported.");
12519
+ const track = this.currentTrack;
12520
+ assert(track);
12521
+ if (!track.sampleTable) {
12522
+ break;
12523
+ }
12524
+ this.metadataReader.pos += 4;
12525
+ this.metadataReader.pos += 3;
12526
+ const fieldSize = this.metadataReader.readU8();
12527
+ const sampleCount = this.metadataReader.readU32();
12528
+ const bytes2 = this.metadataReader.readBytes(Math.ceil(sampleCount * fieldSize / 8));
12529
+ const bitstream = new Bitstream(bytes2);
12530
+ for (let i = 0; i < sampleCount; i++) {
12531
+ const sampleSize = bitstream.readBits(fieldSize);
12532
+ track.sampleTable.sampleSizes.push(sampleSize);
12533
+ }
12506
12534
  }
12507
12535
  ;
12536
+ break;
12508
12537
  case "stss":
12509
12538
  {
12510
12539
  const track = this.currentTrack;
@@ -12702,6 +12731,15 @@ ${cue.notes ?? ""}`;
12702
12731
  (x) => x.moofOffset
12703
12732
  );
12704
12733
  this.currentTrack.fragments.splice(insertionIndex + 1, 0, this.currentFragment);
12734
+ const hasKeyFrame = trackData.firstKeyFrameTimestamp !== null;
12735
+ if (hasKeyFrame) {
12736
+ const insertionIndex2 = binarySearchLessOrEqual(
12737
+ this.currentTrack.fragmentsWithKeyFrame,
12738
+ this.currentFragment.moofOffset,
12739
+ (x) => x.moofOffset
12740
+ );
12741
+ this.currentTrack.fragmentsWithKeyFrame.splice(insertionIndex2 + 1, 0, this.currentFragment);
12742
+ }
12705
12743
  const { currentFragmentState } = this.currentTrack;
12706
12744
  assert(currentFragmentState);
12707
12745
  if (currentFragmentState.startTimestamp !== null) {
@@ -12788,7 +12826,8 @@ ${cue.notes ?? ""}`;
12788
12826
  assert(this.currentFragment);
12789
12827
  assert(track.currentFragmentState);
12790
12828
  if (this.currentFragment.trackData.has(track.id)) {
12791
- throw new Error("Can't have two trun boxes for the same track in one fragment.");
12829
+ console.warn("Can't have two trun boxes for the same track in one fragment. Ignoring...");
12830
+ break;
12792
12831
  }
12793
12832
  const version = this.metadataReader.readU8();
12794
12833
  const flags = this.metadataReader.readU24();
@@ -12816,6 +12855,7 @@ ${cue.notes ?? ""}`;
12816
12855
  const trackData = {
12817
12856
  startTimestamp: 0,
12818
12857
  endTimestamp: 0,
12858
+ firstKeyFrameTimestamp: null,
12819
12859
  samples: [],
12820
12860
  presentationTimestamps: [],
12821
12861
  startTimestampIsFinal: false
@@ -12866,11 +12906,16 @@ ${cue.notes ?? ""}`;
12866
12906
  currentTimestamp += sampleDuration;
12867
12907
  }
12868
12908
  trackData.presentationTimestamps = trackData.samples.map((x, i) => ({ presentationTimestamp: x.presentationTimestamp, sampleIndex: i })).sort((a, b) => a.presentationTimestamp - b.presentationTimestamp);
12869
- for (let i = 0; i < trackData.presentationTimestamps.length - 1; i++) {
12870
- const current = trackData.presentationTimestamps[i];
12871
- const next = trackData.presentationTimestamps[i + 1];
12872
- const duration = next.presentationTimestamp - current.presentationTimestamp;
12873
- trackData.samples[current.sampleIndex].duration = duration;
12909
+ for (let i = 0; i < trackData.presentationTimestamps.length; i++) {
12910
+ const currentEntry = trackData.presentationTimestamps[i];
12911
+ const currentSample = trackData.samples[currentEntry.sampleIndex];
12912
+ if (trackData.firstKeyFrameTimestamp === null && currentSample.isKeyFrame) {
12913
+ trackData.firstKeyFrameTimestamp = currentSample.presentationTimestamp;
12914
+ }
12915
+ if (i < trackData.presentationTimestamps.length - 1) {
12916
+ const nextEntry = trackData.presentationTimestamps[i + 1];
12917
+ currentSample.duration = nextEntry.presentationTimestamp - currentEntry.presentationTimestamp;
12918
+ }
12874
12919
  }
12875
12920
  const firstSample = trackData.samples[trackData.presentationTimestamps[0].sampleIndex];
12876
12921
  const lastSample = trackData.samples[last(trackData.presentationTimestamps).sampleIndex];
@@ -13069,7 +13114,7 @@ ${cue.notes ?? ""}`;
13069
13114
  while (currentFragment.nextFragment) {
13070
13115
  currentFragment = currentFragment.nextFragment;
13071
13116
  const trackData2 = currentFragment.trackData.get(this.internalTrack.id);
13072
- if (trackData2) {
13117
+ if (trackData2 && trackData2.firstKeyFrameTimestamp !== null) {
13073
13118
  const fragmentIndex2 = binarySearchExact(
13074
13119
  this.internalTrack.fragments,
13075
13120
  currentFragment.moofOffset,
@@ -13077,9 +13122,7 @@ ${cue.notes ?? ""}`;
13077
13122
  );
13078
13123
  assert(fragmentIndex2 !== -1);
13079
13124
  const keyFrameIndex = trackData2.samples.findIndex((x) => x.isKeyFrame);
13080
- if (keyFrameIndex === -1) {
13081
- throw new Error("Not supported: Fragment does not contain key sample.");
13082
- }
13125
+ assert(keyFrameIndex !== -1);
13083
13126
  return {
13084
13127
  fragmentIndex: fragmentIndex2,
13085
13128
  sampleIndex: keyFrameIndex,
@@ -13192,24 +13235,29 @@ ${cue.notes ?? ""}`;
13192
13235
  return { fragmentIndex, sampleIndex, correctSampleFound };
13193
13236
  }
13194
13237
  findKeySampleInFragmentsForTimestamp(timestampInTimescale) {
13195
- const fragmentIndex = binarySearchLessOrEqual(
13238
+ const indexInKeyFrameFragments = binarySearchLessOrEqual(
13196
13239
  // This array is technically not sorted by start timestamp, but for any reasonable file, it basically is.
13197
- this.internalTrack.fragments,
13240
+ this.internalTrack.fragmentsWithKeyFrame,
13198
13241
  timestampInTimescale,
13199
13242
  (x) => x.trackData.get(this.internalTrack.id).startTimestamp
13200
13243
  );
13244
+ let fragmentIndex = -1;
13201
13245
  let sampleIndex = -1;
13202
13246
  let correctSampleFound = false;
13203
- if (fragmentIndex !== -1) {
13204
- const fragment = this.internalTrack.fragments[fragmentIndex];
13247
+ if (indexInKeyFrameFragments !== -1) {
13248
+ const fragment = this.internalTrack.fragmentsWithKeyFrame[indexInKeyFrameFragments];
13249
+ fragmentIndex = binarySearchExact(
13250
+ this.internalTrack.fragments,
13251
+ fragment.moofOffset,
13252
+ (x) => x.moofOffset
13253
+ );
13254
+ assert(fragmentIndex !== -1);
13205
13255
  const trackData = fragment.trackData.get(this.internalTrack.id);
13206
13256
  const index = findLastIndex(trackData.presentationTimestamps, (x) => {
13207
13257
  const sample = trackData.samples[x.sampleIndex];
13208
13258
  return sample.isKeyFrame && x.presentationTimestamp <= timestampInTimescale;
13209
13259
  });
13210
- if (index === -1) {
13211
- throw new Error("Not supported: Fragment does not begin with a key sample.");
13212
- }
13260
+ assert(index !== -1);
13213
13261
  const entry = trackData.presentationTimestamps[index];
13214
13262
  sampleIndex = entry.sampleIndex;
13215
13263
  correctSampleFound = timestampInTimescale < trackData.endTimestamp;
@@ -13732,20 +13780,15 @@ ${cue.notes ?? ""}`;
13732
13780
  trackData.blocks = sortBlocksByReferences(trackData.blocks);
13733
13781
  }
13734
13782
  trackData.presentationTimestamps = trackData.blocks.map((block, i) => ({ timestamp: block.timestamp, blockIndex: i })).sort((a, b) => a.timestamp - b.timestamp);
13735
- let hasKeyFrame = false;
13736
13783
  for (let i = 0; i < trackData.presentationTimestamps.length; i++) {
13737
- const entry = trackData.presentationTimestamps[i];
13738
- const block = trackData.blocks[entry.blockIndex];
13739
- if (block.isKeyFrame) {
13740
- hasKeyFrame = true;
13741
- if (trackData.firstKeyFrameTimestamp === null && block.isKeyFrame) {
13742
- trackData.firstKeyFrameTimestamp = block.timestamp;
13743
- }
13784
+ const currentEntry = trackData.presentationTimestamps[i];
13785
+ const currentBlock = trackData.blocks[currentEntry.blockIndex];
13786
+ if (trackData.firstKeyFrameTimestamp === null && currentBlock.isKeyFrame) {
13787
+ trackData.firstKeyFrameTimestamp = currentBlock.timestamp;
13744
13788
  }
13745
13789
  if (i < trackData.presentationTimestamps.length - 1) {
13746
13790
  const nextEntry = trackData.presentationTimestamps[i + 1];
13747
- const nextBlock = trackData.blocks[nextEntry.blockIndex];
13748
- block.duration = nextBlock.timestamp - block.timestamp;
13791
+ currentBlock.duration = nextEntry.timestamp - currentBlock.timestamp;
13749
13792
  }
13750
13793
  }
13751
13794
  const firstBlock = trackData.blocks[trackData.presentationTimestamps[0].blockIndex];
@@ -13760,6 +13803,7 @@ ${cue.notes ?? ""}`;
13760
13803
  (x) => x.elementStartPos
13761
13804
  );
13762
13805
  track.clusters.splice(insertionIndex2 + 1, 0, cluster);
13806
+ const hasKeyFrame = trackData.firstKeyFrameTimestamp !== null;
13763
13807
  if (hasKeyFrame) {
13764
13808
  const insertionIndex3 = binarySearchLessOrEqual(
13765
13809
  track.clustersWithKeyFrame,
@@ -14798,7 +14842,7 @@ ${cue.notes ?? ""}`;
14798
14842
  readNextFrameHeader(until) {
14799
14843
  assert(this.fileSize);
14800
14844
  until ??= this.fileSize;
14801
- while (this.pos < until - FRAME_HEADER_SIZE) {
14845
+ while (this.pos <= until - FRAME_HEADER_SIZE) {
14802
14846
  const word = this.readU32();
14803
14847
  this.pos -= 4;
14804
14848
  const header = readFrameHeader(word, this);
@@ -15840,7 +15884,8 @@ ${cue.notes ?? ""}`;
15840
15884
  return true;
15841
15885
  }
15842
15886
  mp3Reader.pos = firstHeader.startPos + firstHeader.totalSize;
15843
- const secondHeader = mp3Reader.readNextFrameHeader(Math.min(framesStartPos + 4096, sourceSize));
15887
+ await mp3Reader.reader.loadRange(mp3Reader.pos, mp3Reader.pos + FRAME_HEADER_SIZE);
15888
+ const secondHeader = mp3Reader.readNextFrameHeader(mp3Reader.pos + FRAME_HEADER_SIZE);
15844
15889
  if (!secondHeader) {
15845
15890
  return false;
15846
15891
  }