mediabunny 1.4.4 → 1.5.0

This diff compares the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions exactly as they appear in their respective public registries.
@@ -5660,7 +5660,11 @@ var Mediabunny = (() => {
5660
5660
  readString(length) {
5661
5661
  const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + length);
5662
5662
  this.pos += length;
5663
- return String.fromCharCode(...new Uint8Array(view2.buffer, offset, length));
5663
+ let strLength = 0;
5664
+ while (strLength < length && view2.getUint8(offset + strLength) !== 0) {
5665
+ strLength += 1;
5666
+ }
5667
+ return String.fromCharCode(...new Uint8Array(view2.buffer, offset, strLength));
5664
5668
  }
5665
5669
  readElementId() {
5666
5670
  const size = this.readVarIntSize();
@@ -5726,6 +5730,27 @@ var Mediabunny = (() => {
5726
5730
  "pcm-f64": "A_PCM/FLOAT/IEEE",
5727
5731
  "webvtt": "S_TEXT/WEBVTT"
5728
5732
  };
5733
+ var readVarInt = (data, offset) => {
5734
+ if (offset >= data.length) {
5735
+ throw new Error("Offset out of bounds.");
5736
+ }
5737
+ const firstByte = data[offset];
5738
+ let width = 1;
5739
+ let mask = 1 << 7;
5740
+ while ((firstByte & mask) === 0 && width < 8) {
5741
+ width++;
5742
+ mask >>= 1;
5743
+ }
5744
+ if (offset + width > data.length) {
5745
+ throw new Error("VarInt extends beyond data bounds.");
5746
+ }
5747
+ let value = firstByte & mask - 1;
5748
+ for (let i = 1; i < width; i++) {
5749
+ value *= 1 << 8;
5750
+ value += data[offset + i];
5751
+ }
5752
+ return { value, width };
5753
+ };
5729
5754
  function assertDefinedSize(size) {
5730
5755
  if (size === null) {
5731
5756
  throw new Error("Undefined element size is used in a place where it is not supported.");
@@ -13636,7 +13661,6 @@ ${cue.notes ?? ""}`;
13636
13661
  throw new Error("Packet was not created from this track.");
13637
13662
  }
13638
13663
  const trackData = locationInFragment.fragment.trackData.get(this.internalTrack.id);
13639
- const fragmentSample = trackData.samples[locationInFragment.sampleIndex];
13640
13664
  const fragmentIndex = binarySearchExact(
13641
13665
  this.internalTrack.fragments,
13642
13666
  locationInFragment.fragment.moofOffset,
@@ -13677,7 +13701,8 @@ ${cue.notes ?? ""}`;
13677
13701
  };
13678
13702
  }
13679
13703
  },
13680
- fragmentSample.presentationTimestamp,
13704
+ -Infinity,
13705
+ // Use -Infinity as a search timestamp to avoid using the lookup entries
13681
13706
  Infinity,
13682
13707
  options
13683
13708
  );
@@ -13710,7 +13735,6 @@ ${cue.notes ?? ""}`;
13710
13735
  throw new Error("Packet was not created from this track.");
13711
13736
  }
13712
13737
  const trackData = locationInFragment.fragment.trackData.get(this.internalTrack.id);
13713
- const fragmentSample = trackData.samples[locationInFragment.sampleIndex];
13714
13738
  const fragmentIndex = binarySearchExact(
13715
13739
  this.internalTrack.fragments,
13716
13740
  locationInFragment.fragment.moofOffset,
@@ -13756,7 +13780,8 @@ ${cue.notes ?? ""}`;
13756
13780
  };
13757
13781
  }
13758
13782
  },
13759
- fragmentSample.presentationTimestamp,
13783
+ -Infinity,
13784
+ // Use -Infinity as a search timestamp to avoid using the lookup entries
13760
13785
  Infinity,
13761
13786
  options
13762
13787
  );
@@ -14401,12 +14426,15 @@ ${cue.notes ?? ""}`;
14401
14426
  this.currentCluster = cluster;
14402
14427
  this.readContiguousElements(this.clusterReader, size);
14403
14428
  for (const [trackId, trackData] of cluster.trackData) {
14404
- let blockReferencesExist = false;
14429
+ const track = segment.tracks.find((x) => x.id === trackId) ?? null;
14405
14430
  assert(trackData.blocks.length > 0);
14431
+ let blockReferencesExist = false;
14432
+ let hasLacedBlocks = false;
14406
14433
  for (let i = 0; i < trackData.blocks.length; i++) {
14407
14434
  const block = trackData.blocks[i];
14408
14435
  block.timestamp += cluster.timestamp;
14409
14436
  blockReferencesExist ||= block.referencedTimestamps.length > 0;
14437
+ hasLacedBlocks ||= block.lacing !== 0 /* None */;
14410
14438
  }
14411
14439
  if (blockReferencesExist) {
14412
14440
  trackData.blocks = sortBlocksByReferences(trackData.blocks);
@@ -14421,13 +14449,23 @@ ${cue.notes ?? ""}`;
14421
14449
  if (i < trackData.presentationTimestamps.length - 1) {
14422
14450
  const nextEntry = trackData.presentationTimestamps[i + 1];
14423
14451
  currentBlock.duration = nextEntry.timestamp - currentBlock.timestamp;
14452
+ } else if (currentBlock.duration === 0) {
14453
+ if (track?.defaultDuration != null) {
14454
+ if (currentBlock.lacing === 0 /* None */) {
14455
+ currentBlock.duration = track.defaultDuration;
14456
+ } else {
14457
+ }
14458
+ }
14424
14459
  }
14425
14460
  }
14461
+ if (hasLacedBlocks) {
14462
+ this.expandLacedBlocks(trackData.blocks, track);
14463
+ trackData.presentationTimestamps = trackData.blocks.map((block, i) => ({ timestamp: block.timestamp, blockIndex: i })).sort((a, b) => a.timestamp - b.timestamp);
14464
+ }
14426
14465
  const firstBlock = trackData.blocks[trackData.presentationTimestamps[0].blockIndex];
14427
14466
  const lastBlock = trackData.blocks[last(trackData.presentationTimestamps).blockIndex];
14428
14467
  trackData.startTimestamp = firstBlock.timestamp;
14429
14468
  trackData.endTimestamp = lastBlock.timestamp + lastBlock.duration;
14430
- const track = segment.tracks.find((x) => x.id === trackId);
14431
14469
  if (track) {
14432
14470
  const insertionIndex2 = binarySearchLessOrEqual(
14433
14471
  track.clusters,
@@ -14469,6 +14507,95 @@ ${cue.notes ?? ""}`;
14469
14507
  }
14470
14508
  return trackData;
14471
14509
  }
14510
+ expandLacedBlocks(blocks, track) {
14511
+ for (let blockIndex = 0; blockIndex < blocks.length; blockIndex++) {
14512
+ const originalBlock = blocks[blockIndex];
14513
+ if (originalBlock.lacing === 0 /* None */) {
14514
+ continue;
14515
+ }
14516
+ const data = originalBlock.data;
14517
+ let pos = 0;
14518
+ const frameSizes = [];
14519
+ const frameCount = data[pos] + 1;
14520
+ pos++;
14521
+ switch (originalBlock.lacing) {
14522
+ case 1 /* Xiph */:
14523
+ {
14524
+ let totalUsedSize = 0;
14525
+ for (let i = 0; i < frameCount - 1; i++) {
14526
+ let frameSize = 0;
14527
+ while (pos < data.length) {
14528
+ const value = data[pos];
14529
+ frameSize += value;
14530
+ pos++;
14531
+ if (value < 255) {
14532
+ frameSizes.push(frameSize);
14533
+ totalUsedSize += frameSize;
14534
+ break;
14535
+ }
14536
+ }
14537
+ }
14538
+ frameSizes.push(data.length - (pos + totalUsedSize));
14539
+ }
14540
+ ;
14541
+ break;
14542
+ case 2 /* FixedSize */:
14543
+ {
14544
+ const totalDataSize = data.length - 1;
14545
+ const frameSize = Math.floor(totalDataSize / frameCount);
14546
+ for (let i = 0; i < frameCount; i++) {
14547
+ frameSizes.push(frameSize);
14548
+ }
14549
+ }
14550
+ ;
14551
+ break;
14552
+ case 3 /* Ebml */:
14553
+ {
14554
+ const firstResult = readVarInt(data, pos);
14555
+ let currentSize = firstResult.value;
14556
+ frameSizes.push(currentSize);
14557
+ pos += firstResult.width;
14558
+ let totalUsedSize = currentSize;
14559
+ for (let i = 1; i < frameCount - 1; i++) {
14560
+ const diffResult = readVarInt(data, pos);
14561
+ const unsignedDiff = diffResult.value;
14562
+ const bias = (1 << diffResult.width * 7 - 1) - 1;
14563
+ const diff = unsignedDiff - bias;
14564
+ currentSize += diff;
14565
+ frameSizes.push(currentSize);
14566
+ pos += diffResult.width;
14567
+ totalUsedSize += currentSize;
14568
+ }
14569
+ frameSizes.push(data.length - (pos + totalUsedSize));
14570
+ }
14571
+ ;
14572
+ break;
14573
+ default:
14574
+ assert(false);
14575
+ }
14576
+ assert(frameSizes.length === frameCount);
14577
+ blocks.splice(blockIndex, 1);
14578
+ let dataOffset = pos;
14579
+ for (let i = 0; i < frameCount; i++) {
14580
+ const frameSize = frameSizes[i];
14581
+ const frameData = data.subarray(dataOffset, dataOffset + frameSize);
14582
+ const blockDuration = originalBlock.duration || frameCount * (track?.defaultDuration ?? 0);
14583
+ const frameTimestamp = originalBlock.timestamp + blockDuration * i / frameCount;
14584
+ const frameDuration = blockDuration / frameCount;
14585
+ blocks.splice(blockIndex + i, 0, {
14586
+ timestamp: frameTimestamp,
14587
+ duration: frameDuration,
14588
+ isKeyFrame: originalBlock.isKeyFrame,
14589
+ referencedTimestamps: originalBlock.referencedTimestamps,
14590
+ data: frameData,
14591
+ lacing: 0 /* None */
14592
+ });
14593
+ dataOffset += frameSize;
14594
+ }
14595
+ blockIndex += frameCount;
14596
+ blockIndex--;
14597
+ }
14598
+ }
14472
14599
  readContiguousElements(reader, totalSize) {
14473
14600
  const startIndex = reader.pos;
14474
14601
  while (reader.pos - startIndex <= totalSize - MIN_HEADER_SIZE) {
@@ -14543,6 +14670,7 @@ ${cue.notes ?? ""}`;
14543
14670
  inputTrack: null,
14544
14671
  codecId: null,
14545
14672
  codecPrivate: null,
14673
+ defaultDuration: null,
14546
14674
  languageCode: UNDETERMINED_LANGUAGE,
14547
14675
  info: null
14548
14676
  };
@@ -14690,6 +14818,13 @@ ${cue.notes ?? ""}`;
14690
14818
  }
14691
14819
  ;
14692
14820
  break;
14821
+ case 2352003 /* DefaultDuration */:
14822
+ {
14823
+ if (!this.currentTrack) break;
14824
+ this.currentTrack.defaultDuration = this.currentTrack.segment.timestampFactor * reader.readUnsignedInt(size) / 1e9;
14825
+ }
14826
+ ;
14827
+ break;
14693
14828
  case 2274716 /* Language */:
14694
14829
  {
14695
14830
  if (!this.currentTrack) break;
@@ -14868,14 +15003,17 @@ ${cue.notes ?? ""}`;
14868
15003
  const relativeTimestamp = reader.readS16();
14869
15004
  const flags = reader.readU8();
14870
15005
  const isKeyFrame = !!(flags & 128);
15006
+ const lacing = flags >> 1 & 3;
14871
15007
  const trackData = this.getTrackDataInCluster(this.currentCluster, trackNumber);
14872
15008
  trackData.blocks.push({
14873
15009
  timestamp: relativeTimestamp,
14874
15010
  // We'll add the cluster's timestamp to this later
14875
15011
  duration: 0,
15012
+ // Will set later
14876
15013
  isKeyFrame,
14877
15014
  referencedTimestamps: [],
14878
- data: reader.readBytes(size - (reader.pos - dataStartPos))
15015
+ data: reader.readBytes(size - (reader.pos - dataStartPos)),
15016
+ lacing
14879
15017
  });
14880
15018
  }
14881
15019
  ;
@@ -14899,14 +15037,17 @@ ${cue.notes ?? ""}`;
14899
15037
  const trackNumber = reader.readVarInt();
14900
15038
  const relativeTimestamp = reader.readS16();
14901
15039
  const flags = reader.readU8();
15040
+ const lacing = flags >> 1 & 3;
14902
15041
  const trackData = this.getTrackDataInCluster(this.currentCluster, trackNumber);
14903
15042
  this.currentBlock = {
14904
15043
  timestamp: relativeTimestamp,
14905
15044
  // We'll add the cluster's timestamp to this later
14906
15045
  duration: 0,
15046
+ // Will set later
14907
15047
  isKeyFrame: true,
14908
15048
  referencedTimestamps: [],
14909
- data: reader.readBytes(size - (reader.pos - dataStartPos))
15049
+ data: reader.readBytes(size - (reader.pos - dataStartPos)),
15050
+ lacing
14910
15051
  };
14911
15052
  trackData.blocks.push(this.currentBlock);
14912
15053
  }
@@ -15009,7 +15150,6 @@ ${cue.notes ?? ""}`;
15009
15150
  throw new Error("Packet was not created from this track.");
15010
15151
  }
15011
15152
  const trackData = locationInCluster.cluster.trackData.get(this.internalTrack.id);
15012
- const block = trackData.blocks[locationInCluster.blockIndex];
15013
15153
  const clusterIndex = binarySearchExact(
15014
15154
  this.internalTrack.clusters,
15015
15155
  locationInCluster.cluster.elementStartPos,
@@ -15050,7 +15190,8 @@ ${cue.notes ?? ""}`;
15050
15190
  };
15051
15191
  }
15052
15192
  },
15053
- block.timestamp,
15193
+ -Infinity,
15194
+ // Use -Infinity as a search timestamp to avoid using the cues
15054
15195
  Infinity,
15055
15196
  options
15056
15197
  );
@@ -15070,7 +15211,6 @@ ${cue.notes ?? ""}`;
15070
15211
  throw new Error("Packet was not created from this track.");
15071
15212
  }
15072
15213
  const trackData = locationInCluster.cluster.trackData.get(this.internalTrack.id);
15073
- const block = trackData.blocks[locationInCluster.blockIndex];
15074
15214
  const clusterIndex = binarySearchExact(
15075
15215
  this.internalTrack.clusters,
15076
15216
  locationInCluster.cluster.elementStartPos,
@@ -15116,7 +15256,8 @@ ${cue.notes ?? ""}`;
15116
15256
  };
15117
15257
  }
15118
15258
  },
15119
- block.timestamp,
15259
+ -Infinity,
15260
+ // Use -Infinity as a search timestamp to avoid using the cues
15120
15261
  Infinity,
15121
15262
  options
15122
15263
  );
@@ -16773,8 +16914,11 @@ ${cue.notes ?? ""}`;
16773
16914
  if (options.video?.rotate !== void 0 && ![0, 90, 180, 270].includes(options.video.rotate)) {
16774
16915
  throw new TypeError("options.video.rotate, when provided, must be 0, 90, 180 or 270.");
16775
16916
  }
16917
+ if (options.video?.frameRate !== void 0 && (!Number.isFinite(options.video.frameRate) || options.video.frameRate <= 0)) {
16918
+ throw new TypeError("options.video.frameRate, when provided, must be a finite positive number.");
16919
+ }
16776
16920
  if (options.audio !== void 0 && (!options.audio || typeof options.audio !== "object")) {
16777
- throw new TypeError("options.video, when provided, must be an object.");
16921
+ throw new TypeError("options.audio, when provided, must be an object.");
16778
16922
  }
16779
16923
  if (options.audio?.discard !== void 0 && typeof options.audio.discard !== "boolean") {
16780
16924
  throw new TypeError("options.audio.discard, when provided, must be a boolean.");
@@ -16941,7 +17085,7 @@ ${cue.notes ?? ""}`;
16941
17085
  height = ceilToMultipleOfTwo(this._options.video.height);
16942
17086
  }
16943
17087
  const firstTimestamp = await track.getFirstTimestamp();
16944
- const needsTranscode = !!this._options.video?.forceTranscode || this._startTimestamp > 0 || firstTimestamp < 0;
17088
+ const needsTranscode = !!this._options.video?.forceTranscode || this._startTimestamp > 0 || firstTimestamp < 0 || !!this._options.video?.frameRate;
16945
17089
  const needsRerender = width !== originalWidth || height !== originalHeight || totalRotation !== 0 && !outputSupportsRotation;
16946
17090
  let videoCodecs = this.output.format.getSupportedVideoCodecs();
16947
17091
  if (!needsTranscode && !this._options.video?.bitrate && !needsRerender && videoCodecs.includes(sourceCodec) && (!this._options.video?.codec || this._options.video?.codec === sourceCodec)) {
@@ -16992,9 +17136,9 @@ ${cue.notes ?? ""}`;
16992
17136
  bitrate,
16993
17137
  onEncodedPacket: (sample) => this._reportProgress(track.id, sample.timestamp + sample.duration)
16994
17138
  };
17139
+ const source = new VideoSampleSource(encodingConfig);
17140
+ videoSource = source;
16995
17141
  if (needsRerender) {
16996
- const source = new VideoSampleSource(encodingConfig);
16997
- videoSource = source;
16998
17142
  this._trackPromises.push((async () => {
16999
17143
  await this._started;
17000
17144
  const sink = new CanvasSink(track, {
@@ -17006,6 +17150,22 @@ ${cue.notes ?? ""}`;
17006
17150
  poolSize: 1
17007
17151
  });
17008
17152
  const iterator = sink.canvases(this._startTimestamp, this._endTimestamp);
17153
+ const frameRate = this._options.video?.frameRate;
17154
+ let lastCanvas = null;
17155
+ let lastCanvasTimestamp = null;
17156
+ let lastCanvasEndTimestamp = null;
17157
+ const padFrames = async (until) => {
17158
+ assert(lastCanvas);
17159
+ assert(frameRate !== void 0);
17160
+ const frameDifference = Math.round((until - lastCanvasTimestamp) * frameRate);
17161
+ for (let i = 1; i < frameDifference; i++) {
17162
+ const sample = new VideoSample(lastCanvas, {
17163
+ timestamp: lastCanvasTimestamp + i / frameRate,
17164
+ duration: 1 / frameRate
17165
+ });
17166
+ await source.add(sample);
17167
+ }
17168
+ };
17009
17169
  for await (const { canvas, timestamp, duration } of iterator) {
17010
17170
  if (this._synchronizer.shouldWait(track.id, timestamp)) {
17011
17171
  await this._synchronizer.wait(timestamp);
@@ -17013,30 +17173,98 @@ ${cue.notes ?? ""}`;
17013
17173
  if (this._canceled) {
17014
17174
  return;
17015
17175
  }
17176
+ let adjustedSampleTimestamp = Math.max(timestamp - this._startTimestamp, 0);
17177
+ lastCanvasEndTimestamp = timestamp + duration;
17178
+ if (frameRate !== void 0) {
17179
+ const alignedTimestamp = Math.floor(adjustedSampleTimestamp * frameRate) / frameRate;
17180
+ if (lastCanvas !== null) {
17181
+ if (alignedTimestamp <= lastCanvasTimestamp) {
17182
+ lastCanvas = canvas;
17183
+ lastCanvasTimestamp = alignedTimestamp;
17184
+ continue;
17185
+ } else {
17186
+ await padFrames(alignedTimestamp);
17187
+ }
17188
+ }
17189
+ adjustedSampleTimestamp = alignedTimestamp;
17190
+ }
17016
17191
  const sample = new VideoSample(canvas, {
17017
- timestamp: Math.max(timestamp - this._startTimestamp, 0),
17018
- duration
17192
+ timestamp: adjustedSampleTimestamp,
17193
+ duration: frameRate !== void 0 ? 1 / frameRate : duration
17019
17194
  });
17020
17195
  await source.add(sample);
17021
- sample.close();
17196
+ if (frameRate !== void 0) {
17197
+ lastCanvas = canvas;
17198
+ lastCanvasTimestamp = adjustedSampleTimestamp;
17199
+ } else {
17200
+ sample.close();
17201
+ }
17022
17202
  }
17203
+ if (lastCanvas) {
17204
+ assert(lastCanvasEndTimestamp !== null);
17205
+ assert(frameRate !== void 0);
17206
+ await padFrames(Math.floor(lastCanvasEndTimestamp * frameRate) / frameRate);
17207
+ }
17208
+ source.close();
17209
+ this._synchronizer.closeTrack(track.id);
17023
17210
  })());
17024
17211
  } else {
17025
- const source = new VideoSampleSource(encodingConfig);
17026
- videoSource = source;
17027
17212
  this._trackPromises.push((async () => {
17028
17213
  await this._started;
17029
17214
  const sink = new VideoSampleSink(track);
17215
+ const frameRate = this._options.video?.frameRate;
17216
+ let lastSample = null;
17217
+ let lastSampleTimestamp = null;
17218
+ let lastSampleEndTimestamp = null;
17219
+ const padFrames = async (until) => {
17220
+ assert(lastSample);
17221
+ assert(frameRate !== void 0);
17222
+ const frameDifference = Math.round((until - lastSampleTimestamp) * frameRate);
17223
+ for (let i = 1; i < frameDifference; i++) {
17224
+ lastSample.setTimestamp(lastSampleTimestamp + i / frameRate);
17225
+ lastSample.setDuration(1 / frameRate);
17226
+ await source.add(lastSample);
17227
+ }
17228
+ lastSample.close();
17229
+ };
17030
17230
  for await (const sample of sink.samples(this._startTimestamp, this._endTimestamp)) {
17031
17231
  if (this._synchronizer.shouldWait(track.id, sample.timestamp)) {
17032
17232
  await this._synchronizer.wait(sample.timestamp);
17033
17233
  }
17034
- sample.setTimestamp(Math.max(sample.timestamp - this._startTimestamp, 0));
17035
17234
  if (this._canceled) {
17235
+ lastSample?.close();
17036
17236
  return;
17037
17237
  }
17238
+ let adjustedSampleTimestamp = Math.max(sample.timestamp - this._startTimestamp, 0);
17239
+ lastSampleEndTimestamp = sample.timestamp + sample.duration;
17240
+ if (frameRate !== void 0) {
17241
+ const alignedTimestamp = Math.floor(adjustedSampleTimestamp * frameRate) / frameRate;
17242
+ if (lastSample !== null) {
17243
+ if (alignedTimestamp <= lastSampleTimestamp) {
17244
+ lastSample.close();
17245
+ lastSample = sample;
17246
+ lastSampleTimestamp = alignedTimestamp;
17247
+ continue;
17248
+ } else {
17249
+ await padFrames(alignedTimestamp);
17250
+ }
17251
+ }
17252
+ adjustedSampleTimestamp = alignedTimestamp;
17253
+ sample.setDuration(1 / frameRate);
17254
+ }
17255
+ sample.setTimestamp(adjustedSampleTimestamp);
17038
17256
  await source.add(sample);
17039
- sample.close();
17257
+ if (frameRate !== void 0) {
17258
+ lastSample = sample;
17259
+ lastSampleTimestamp = adjustedSampleTimestamp;
17260
+ } else {
17261
+ sample.close();
17262
+ }
17263
+ }
17264
+ if (lastSample) {
17265
+ assert(lastSampleEndTimestamp !== null);
17266
+ assert(frameRate !== void 0);
17267
+ await padFrames(Math.floor(lastSampleEndTimestamp * frameRate) / frameRate);
17040
17268
  }
17041
17269
  source.close();
17042
17270
  this._synchronizer.closeTrack(track.id);
@@ -17044,6 +17272,7 @@ ${cue.notes ?? ""}`;
17044
17272
  }
17045
17273
  }
17046
17274
  this.output.addVideoTrack(videoSource, {
17275
+ frameRate: this._options.video?.frameRate,
17047
17276
  languageCode: track.languageCode,
17048
17277
  rotation: needsRerender ? 0 : totalRotation
17049
17278
  // Rerendering will bake the rotation into the output