@remotion/media-parser 4.0.303 → 4.0.305

This diff shows the content of publicly available package versions as released to a supported registry, and is provided for informational purposes only.
Files changed (65)
  1. package/dist/containers/aac/parse-aac.js +8 -6
  2. package/dist/containers/flac/parse-flac-frame.js +6 -6
  3. package/dist/containers/flac/parse-streaminfo.js +3 -1
  4. package/dist/containers/iso-base-media/find-keyframe-before-time.js +2 -2
  5. package/dist/containers/iso-base-media/get-keyframes.js +3 -3
  6. package/dist/containers/iso-base-media/get-sample-position-bounds.js +2 -2
  7. package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +3 -3
  8. package/dist/containers/iso-base-media/get-seeking-byte.js +1 -1
  9. package/dist/containers/iso-base-media/make-track.js +7 -3
  10. package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +3 -3
  11. package/dist/containers/iso-base-media/mdat/mdat.js +16 -16
  12. package/dist/containers/iso-base-media/parse-icc-profile.js +2 -2
  13. package/dist/containers/m3u/first-sample-in-m3u-chunk.js +2 -1
  14. package/dist/containers/m3u/sample-sorter.js +2 -2
  15. package/dist/containers/mp3/parse-mpeg-header.js +7 -2
  16. package/dist/containers/mp3/seek/audio-sample-from-cbr.js +4 -6
  17. package/dist/containers/mp3/seek/audio-sample-from-vbr.js +4 -6
  18. package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +6 -5
  19. package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +7 -7
  20. package/dist/containers/riff/get-tracks-from-avi.js +5 -2
  21. package/dist/containers/riff/parse-movi.js +35 -25
  22. package/dist/containers/riff/parse-riff-body.js +13 -3
  23. package/dist/containers/riff/seek/fetch-idx1.js +4 -1
  24. package/dist/containers/transport-stream/handle-aac-packet.js +8 -7
  25. package/dist/containers/transport-stream/handle-avc-packet.js +8 -7
  26. package/dist/containers/wav/parse-fmt.js +3 -1
  27. package/dist/containers/wav/parse-media-section.js +5 -5
  28. package/dist/containers/webm/get-sample-from-block.d.ts +6 -0
  29. package/dist/containers/webm/get-sample-from-block.js +9 -8
  30. package/dist/containers/webm/make-track.js +5 -2
  31. package/dist/containers/webm/parse-ebml.js +12 -3
  32. package/dist/containers/webm/seek/seeking-hints.js +1 -1
  33. package/dist/convert-audio-or-video-sample.js +6 -9
  34. package/dist/download-and-parse-media.js +3 -0
  35. package/dist/esm/index.mjs +266 -196
  36. package/dist/esm/worker-server-entry.mjs +262 -196
  37. package/dist/esm/worker-web-entry.mjs +262 -196
  38. package/dist/get-duration.js +2 -2
  39. package/dist/get-sample-positions-from-mp4.js +2 -2
  40. package/dist/get-sample-positions.d.ts +2 -2
  41. package/dist/get-sample-positions.js +2 -2
  42. package/dist/get-tracks.d.ts +6 -3
  43. package/dist/index.d.ts +24 -5
  44. package/dist/index.js +3 -1
  45. package/dist/internal-parse-media.js +3 -0
  46. package/dist/iterator/buffer-manager.js +1 -1
  47. package/dist/parse-media.js +3 -0
  48. package/dist/samples-from-moof.js +2 -2
  49. package/dist/state/iso-base-media/cached-sample-positions.d.ts +2 -2
  50. package/dist/state/parser-state.d.ts +23 -5
  51. package/dist/state/riff/queued-frames.d.ts +14 -3
  52. package/dist/state/riff/queued-frames.js +3 -3
  53. package/dist/state/riff/sample-counter.d.ts +4 -1
  54. package/dist/state/riff/sample-counter.js +8 -7
  55. package/dist/state/riff.d.ts +15 -3
  56. package/dist/state/sample-callbacks.d.ts +8 -2
  57. package/dist/state/sample-callbacks.js +5 -4
  58. package/dist/state/samples-observed/slow-duration-fps.js +7 -6
  59. package/dist/version.d.ts +1 -1
  60. package/dist/version.js +1 -1
  61. package/dist/webcodec-sample-types.d.ts +3 -9
  62. package/dist/webcodecs-timescale.d.ts +1 -0
  63. package/dist/webcodecs-timescale.js +4 -0
  64. package/dist/worker-server.js +2 -1
  65. package/package.json +3 -3
@@ -2431,7 +2431,7 @@ var bufferManager = ({
   counter
 }) => {
   const buf = new ArrayBuffer(initialData.byteLength, {
-    maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 32)
+    maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 31)
   });
   if (!buf.resize) {
     throw new Error("`ArrayBuffer.resize` is not supported in this Runtime. On the server: Use at least Node.js 20 or Bun. In the browser: Chrome 111, Edge 111, Safari 16.4, Firefox 128, Opera 111");
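The cap on maxByteLength drops from 2 ** 32 to 2 ** 31. The diff does not state the motivation, but this reads like a guard for runtimes that reject a resizable ArrayBuffer whose maxByteLength reaches 4 GiB. A minimal sketch of the clamped allocation, with illustrative values:

    // Sketch only; `maxBytes` and the 1024-byte initial buffer are made up.
    const maxBytes: number | null = 4 * 2 ** 30; // caller-supplied cap (4 GiB)
    const initialData = new Uint8Array(1024);
    const buf = new ArrayBuffer(initialData.byteLength, {
      // 4.0.305 clamps to 2 ** 31, so an oversized cap no longer overshoots
      maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 31),
    });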
@@ -3102,6 +3102,9 @@ var getVideoCodecString = (trakBox) => {
   return videoSample.format;
 };

+// src/webcodecs-timescale.ts
+var WEBCODECS_TIMESCALE = 1e6;
+
 // src/containers/iso-base-media/color-to-webcodecs-colors.ts
 var mediaParserAdvancedColorToWebCodecsColor = (color2) => {
   return {
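WEBCODECS_TIMESCALE = 1e6 names the timescale WebCodecs operates in: EncodedVideoChunk and EncodedAudioChunk timestamps are microseconds. The rest of this diff replaces the scattered 1e6 literals (and the old TARGET_TIMESCALE constant) with it. Worked example:

    const WEBCODECS_TIMESCALE = 1e6; // microseconds per second
    const seconds = 2.5;
    const timestamp = Math.round(seconds * WEBCODECS_TIMESCALE); // 2_500_000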
@@ -3371,23 +3374,25 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
     return {
       type: "audio",
       trackId: tkhdBox.trackId,
-      timescale: timescaleAndDuration.timescale,
+      originalTimescale: timescaleAndDuration.timescale,
       codec: codecString,
       numberOfChannels: actual.numberOfChannels,
       sampleRate: actual.sampleRate,
       description: actual.codecPrivate?.data ?? undefined,
       codecData: actual.codecPrivate,
       codecEnum,
-      startInSeconds: startTimeInSeconds
+      startInSeconds: startTimeInSeconds,
+      timescale: WEBCODECS_TIMESCALE
     };
   }
   if (!trakBoxContainsVideo(trakBox)) {
     return {
       type: "other",
       trackId: tkhdBox.trackId,
-      timescale: timescaleAndDuration.timescale,
+      originalTimescale: timescaleAndDuration.timescale,
       trakBox,
-      startInSeconds: startTimeInSeconds
+      startInSeconds: startTimeInSeconds,
+      timescale: WEBCODECS_TIMESCALE
     };
   }
   const videoSample = getStsdVideoConfig(trakBox);
@@ -3420,7 +3425,7 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
     type: "video",
     trackId: tkhdBox.trackId,
     description: videoDescriptors ?? undefined,
-    timescale: timescaleAndDuration.timescale,
+    originalTimescale: timescaleAndDuration.timescale,
     codec,
     sampleAspectRatio: getSampleAspectRatio(trakBox),
     width,
@@ -3435,7 +3440,8 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
     advancedColor,
     codecEnum: getVideoCodecFromIsoTrak(trakBox),
     fps: getFpsFromMp4TrakBox(trakBox),
-    startInSeconds: startTimeInSeconds
+    startInSeconds: startTimeInSeconds,
+    timescale: WEBCODECS_TIMESCALE
   };
   return track;
 };
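Every track shape now carries two timescales: originalTimescale keeps the container's own value, while timescale is always WEBCODECS_TIMESCALE. A hedged consumer sketch; it assumes the public onVideoTrack callback surfaces the same track shape as this bundle:

    import {parseMedia} from '@remotion/media-parser';

    await parseMedia({
      src: 'https://example.com/video.mp4', // hypothetical URL
      acknowledgeRemotionLicense: true,
      onVideoTrack: ({track}) => {
        // track.timescale is now always 1e6; the container value moved:
        console.log(track.timescale, track.originalTimescale);
        return null; // do not subscribe to samples
      },
    });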
@@ -3558,9 +3564,10 @@ var makeAviAudioTrack = ({
     description: new Uint8Array([18, 16]),
     numberOfChannels: strf.numberOfChannels,
     sampleRate: strf.sampleRate,
-    timescale: MEDIA_PARSER_RIFF_TIMESCALE,
+    originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
     trackId: index,
-    startInSeconds: 0
+    startInSeconds: 0,
+    timescale: WEBCODECS_TIMESCALE
   };
 };
 var makeAviVideoTrack = ({
@@ -3581,7 +3588,7 @@ var makeAviVideoTrack = ({
     height: strf.height,
     type: "video",
     displayAspectHeight: strf.height,
-    timescale: MEDIA_PARSER_RIFF_TIMESCALE,
+    originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
     description: undefined,
     m3uStreamFormat: null,
     trackId: index,
@@ -3604,7 +3611,8 @@ var makeAviVideoTrack = ({
       denominator: 1
     },
     fps: strh.rate / strh.scale,
-    startInSeconds: 0
+    startInSeconds: 0,
+    timescale: WEBCODECS_TIMESCALE
   };
 };
 var getTracksFromAvi = (structure, state) => {
@@ -4247,7 +4255,7 @@ var getTrack = ({
       numerator: 1,
       denominator: 1
     },
-    timescale,
+    originalTimescale: timescale,
     codedHeight: height.value.value,
     codedWidth: width.value.value,
     displayAspectHeight: displayHeight2 ? displayHeight2.value.value : height.value.value,
@@ -4258,7 +4266,8 @@ var getTrack = ({
     advancedColor,
     codecEnum,
     fps: null,
-    startInSeconds: 0
+    startInSeconds: 0,
+    timescale: WEBCODECS_TIMESCALE
   };
 }
 if (trackTypeToString(trackType2.value.value) === "audio") {
@@ -4273,7 +4282,7 @@ var getTrack = ({
     type: "audio",
     trackId,
     codec: codecString,
-    timescale,
+    originalTimescale: timescale,
     numberOfChannels,
     sampleRate,
     description: getAudioDescription(track),
@@ -4281,7 +4290,8 @@ var getTrack = ({
     codecEnum: getMatroskaAudioCodecEnum({
       track
     }),
-    startInSeconds: 0
+    startInSeconds: 0,
+    timescale: WEBCODECS_TIMESCALE
   };
 }
 return null;
@@ -4841,8 +4851,8 @@ var getSamplesFromTraf = (trafSegment, moofOffset) => {
   const dts = time + (tfdtBox?.baseMediaDecodeTime ?? 0);
   const samplePosition = {
     offset: offset + (moofOffset ?? 0) + (dataOffset ?? 0),
-    dts,
-    cts: dts + (sample.sampleCompositionTimeOffset ?? 0),
+    decodingTimestamp: dts,
+    timestamp: dts + (sample.sampleCompositionTimeOffset ?? 0),
     duration: duration2,
     isKeyframe: keyframe,
     size,
@@ -4933,8 +4943,8 @@ var getSamplePositions = ({
     offset: Number(chunks[i]) + offsetInThisChunk,
     size,
     isKeyframe,
-    dts,
-    cts,
+    decodingTimestamp: dts,
+    timestamp: cts,
     duration: delta,
     chunk: i,
     bigEndian: false,
@@ -4983,8 +4993,8 @@ var getGroupedSamplesPositionsFromMp4 = ({
   }
   samples.push({
     chunk,
-    cts: timestamp,
-    dts: timestamp,
+    timestamp,
+    decodingTimestamp: timestamp,
     offset: Number(entry),
     size: stszBox.sampleSize * samplesPerChunk,
     duration: samplesPerChunk,
@@ -5344,7 +5354,7 @@ var getDurationFromIsoBaseMedia = (parserState) => {
   }
   const tracks2 = getTracks(parserState, true);
   const allSamples = tracks2.map((t) => {
-    const { timescale: ts } = t;
+    const { originalTimescale: ts } = t;
     const trakBox = getTrakBoxByTrackId(moovBox, t.trackId);
     if (!trakBox) {
       return null;
@@ -5360,7 +5370,7 @@ var getDurationFromIsoBaseMedia = (parserState) => {
     if (samplePositions.length === 0) {
       return null;
     }
-    const highest = samplePositions?.map((sp) => (sp.cts + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
+    const highest = samplePositions?.map((sp) => (sp.timestamp + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
     return highest ?? 0;
   });
   if (allSamples.every((s) => s === null)) {
@@ -5447,7 +5457,7 @@ var getKeyframesFromIsoBaseMedia = (state) => {
     return [];
   }
   const allSamples = videoTracks.map((t) => {
-    const { timescale: ts } = t;
+    const { originalTimescale: ts } = t;
     const trakBox = getTrakBoxByTrackId(moov, t.trackId);
     if (!trakBox) {
       return [];
@@ -5468,8 +5478,8 @@ var getKeyframesFromIsoBaseMedia = (state) => {
   }).map((k) => {
     return {
       trackId: t.trackId,
-      presentationTimeInSeconds: k.cts / ts,
-      decodingTimeInSeconds: k.dts / ts,
+      presentationTimeInSeconds: k.timestamp / ts,
+      decodingTimeInSeconds: k.decodingTimestamp / ts,
       positionInBytes: k.offset,
       sizeInBytes: k.size
     };
@@ -5878,8 +5888,8 @@ var findKeyframeBeforeTime = ({
   let videoByte = 0;
   let videoSample = null;
   for (const sample of samplePositions) {
-    const ctsInSeconds = sample.cts / timescale + startInSeconds;
-    const dtsInSeconds = sample.dts / timescale + startInSeconds;
+    const ctsInSeconds = sample.timestamp / timescale + startInSeconds;
+    const dtsInSeconds = sample.decodingTimestamp / timescale + startInSeconds;
     if (!sample.isKeyframe) {
       continue;
     }
@@ -6031,8 +6041,8 @@ var getSamplePositionBounds = (samplePositions, timescale) => {
   let min = Infinity;
   let max = -Infinity;
   for (const samplePosition of samplePositions) {
-    const timestampMin = Math.min(samplePosition.cts, samplePosition.dts);
-    const timestampMax = Math.max(samplePosition.cts, samplePosition.dts) + (samplePosition.duration ?? 0);
+    const timestampMin = Math.min(samplePosition.timestamp, samplePosition.decodingTimestamp);
+    const timestampMax = Math.max(samplePosition.timestamp, samplePosition.decodingTimestamp) + (samplePosition.duration ?? 0);
     if (timestampMin < min) {
       min = timestampMin;
     }
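The bounds scan reads the renamed fields, but the values remain in the track's originalTimescale units. Worked example with illustrative positions in a 90_000 Hz container timescale:

    const samplePositions = [
      {timestamp: 3000, decodingTimestamp: 2000, duration: 1000},
      {timestamp: 1000, decodingTimestamp: 1000, duration: 1000},
    ];
    // min = 1000; max = Math.max(3000, 2000) + 1000 = 4000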
@@ -6116,13 +6126,13 @@ var getSeekingByteFromFragmentedMp4 = async ({
   });
   Log.trace(logLevel, "Fragmented MP4 - Checking if we have seeking info for this time range");
   for (const positions of samplePositionsArray) {
-    const { min, max } = getSamplePositionBounds(positions.samples, firstTrack.timescale);
+    const { min, max } = getSamplePositionBounds(positions.samples, firstTrack.originalTimescale);
     if (min <= time && (positions.isLastFragment || isLastChunkInPlaylist || time <= max)) {
       Log.trace(logLevel, `Fragmented MP4 - Found that we have seeking info for this time range: ${min} <= ${time} <= ${max}`);
       const kf = findKeyframeBeforeTime({
         samplePositions: positions.samples,
         time,
-        timescale: firstTrack.timescale,
+        timescale: firstTrack.originalTimescale,
         logLevel,
         mediaSections: info.mediaSections,
         startInSeconds: firstTrack.startInSeconds
@@ -6141,7 +6151,7 @@ var getSeekingByteFromFragmentedMp4 = async ({
     mfra: atom,
     time,
     firstTrack,
-    timescale: firstTrack.timescale
+    timescale: firstTrack.originalTimescale
   });
   if (moofOffset !== null && !(moofOffset.start <= currentPosition && currentPosition < moofOffset.end)) {
     Log.verbose(logLevel, `Fragmented MP4 - Found based on mfra information that we should seek to: ${moofOffset.start} ${moofOffset.end}`);
@@ -6225,7 +6235,7 @@ var getSeekingByteFromIsoBaseMedia = ({
   const keyframe = findKeyframeBeforeTime({
     samplePositions,
     time,
-    timescale: track.timescale,
+    timescale: track.originalTimescale,
     logLevel,
     mediaSections: info.mediaSections,
     startInSeconds: track.startInSeconds
@@ -6703,7 +6713,6 @@ var getSeekingByteFromMatroska = async ({
 };

 // src/convert-audio-or-video-sample.ts
-var TARGET_TIMESCALE = 1e6;
 var fixFloat = (value) => {
   if (value % 1 < 0.0000001) {
     return Math.floor(value);
@@ -6717,20 +6726,17 @@ var convertAudioOrVideoSampleToWebCodecsTimestamps = ({
   sample,
   timescale
 }) => {
-  if (timescale === TARGET_TIMESCALE) {
+  if (timescale === WEBCODECS_TIMESCALE) {
     return sample;
   }
-  const { cts, dts, timestamp } = sample;
+  const { decodingTimestamp: dts, timestamp } = sample;
   return {
-    cts: fixFloat(cts * (TARGET_TIMESCALE / timescale)),
-    dts: fixFloat(dts * (TARGET_TIMESCALE / timescale)),
-    timestamp: fixFloat(timestamp * (TARGET_TIMESCALE / timescale)),
-    duration: sample.duration === undefined ? undefined : fixFloat(sample.duration * (TARGET_TIMESCALE / timescale)),
+    decodingTimestamp: fixFloat(dts * (WEBCODECS_TIMESCALE / timescale)),
+    timestamp: fixFloat(timestamp * (WEBCODECS_TIMESCALE / timescale)),
+    duration: sample.duration === undefined ? undefined : fixFloat(sample.duration * (WEBCODECS_TIMESCALE / timescale)),
     data: sample.data,
-    trackId: sample.trackId,
     type: sample.type,
     offset: sample.offset,
-    timescale: TARGET_TIMESCALE,
     ..."avc" in sample ? { avc: sample.avc } : {}
   };
 };
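This is the heart of the rename: samples are rescaled once into WEBCODECS_TIMESCALE, and the per-sample cts, dts, trackId and timescale fields are dropped. timestamp is the presentation time and decodingTimestamp the decode time, both in microseconds. A restated sketch of the math (only the fixFloat branch visible in this excerpt is reproduced; its other branch is truncated above):

    const WEBCODECS_TIMESCALE = 1e6;
    const fixFloat = (value: number): number =>
      value % 1 < 0.0000001 ? Math.floor(value) : value;

    const toWebCodecs = (value: number, timescale: number): number =>
      fixFloat(value * (WEBCODECS_TIMESCALE / timescale));

    toWebCodecs(90_000, 90_000); // 1_000_000, i.e. one second in microseconds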
@@ -7191,7 +7197,7 @@ var handleAvcPacket = async ({
   rotation: 0,
   trackId: programId,
   type: "video",
-  timescale: MPEG_TIMESCALE,
+  originalTimescale: MPEG_TIMESCALE,
   codec: getCodecStringFromSpsAndPps(spsAndPps.sps),
   codecData: { type: "avc-sps-pps", data: codecPrivate2 },
   fps: null,
@@ -7209,7 +7215,8 @@ var handleAvcPacket = async ({
   },
   colorSpace: mediaParserAdvancedColorToWebCodecsColor(advancedColor),
   advancedColor,
-  startInSeconds: 0
+  startInSeconds: 0,
+  timescale: WEBCODECS_TIMESCALE
 };
 await registerVideoTrack({
   track,
@@ -7222,15 +7229,12 @@ var handleAvcPacket = async ({
 }
 const type = getKeyFrameOrDeltaFromAvcInfo(avc);
 const sample = {
-  cts: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
-  dts: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
+  decodingTimestamp: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
   timestamp: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
   duration: undefined,
   data: streamBuffer.getBuffer(),
-  trackId: programId,
   type: type === "bidirectional" ? "delta" : type,
-  offset,
-  timescale: MPEG_TIMESCALE
+  offset
 };
 if (type === "key") {
   transportStream.observedPesHeaders.markPtsAsKeyframe(streamBuffer.pesHeader.pts);
@@ -7239,7 +7243,10 @@
   sample,
   timescale: MPEG_TIMESCALE
 });
-await sampleCallbacks.onVideoSample(programId, videoSample);
+await sampleCallbacks.onVideoSample({
+  videoSample,
+  trackId: programId
+});
 transportStream.lastEmittedSample.setLastEmittedSample(sample);
 };

@@ -7587,7 +7594,7 @@ var getSeekingHintsFromMatroska = (tracksState, keyframesState, webmState) => {
   return {
     type: "webm-seeking-hints",
     track: firstVideoTrack ? {
-      timescale: firstVideoTrack.timescale,
+      timescale: firstVideoTrack.originalTimescale,
       trackId: firstVideoTrack.trackId
     } : null,
     keyframes,
@@ -8716,10 +8723,11 @@ var parseAac = async (state) => {
     description: codecPrivate2,
     numberOfChannels: channelConfiguration,
     sampleRate,
-    timescale: 1e6,
+    originalTimescale: WEBCODECS_TIMESCALE,
     trackId: 0,
     type: "audio",
-    startInSeconds: 0
+    startInSeconds: 0,
+    timescale: WEBCODECS_TIMESCALE
   },
   registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
   tracks: state.callbacks.tracks,
@@ -8742,15 +8750,15 @@ var parseAac = async (state) => {
     type: "key",
     data,
     offset: startOffset,
-    timescale: 1e6,
-    trackId: 0,
-    cts: timestamp,
-    dts: timestamp,
+    decodingTimestamp: timestamp,
     timestamp
   },
   timescale: 1
 });
-await state.callbacks.onAudioSample(0, audioSample);
+await state.callbacks.onAudioSample({
+  audioSample,
+  trackId: 0
+});
 return Promise.resolve(null);
 };

@@ -8928,7 +8936,7 @@ var emitSample = async ({
   data,
   offset
 }) => {
-  const iterator = getArrayBufferIterator(data, null);
+  const iterator = getArrayBufferIterator(data, data.length);
   const parsed = parseFrameHeader({ iterator, state });
   if (!parsed) {
     throw new Error("Invalid CRC");
@@ -8953,17 +8961,17 @@ var emitSample = async ({
   sample: {
     data,
     duration: duration2,
-    cts: timestamp,
-    dts: timestamp,
+    decodingTimestamp: timestamp,
     timestamp,
     type: "key",
-    offset,
-    timescale: 1,
-    trackId: 0
+    offset
   },
   timescale: 1
 });
-await state.callbacks.onAudioSample(0, audioSample);
+await state.callbacks.onAudioSample({
+  audioSample,
+  trackId: 0
+});
 iterator.destroy();
 };
 var parseFlacFrame = async ({
@@ -9119,9 +9127,10 @@ var parseStreamInfo = async ({
   codecEnum: "flac",
   numberOfChannels: channels2,
   sampleRate,
-  timescale: 1e6,
+  originalTimescale: WEBCODECS_TIMESCALE,
   trackId: 0,
-  startInSeconds: 0
+  startInSeconds: 0,
+  timescale: WEBCODECS_TIMESCALE
 },
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
 tracks: state.callbacks.tracks,
@@ -10037,7 +10046,7 @@ var parseAvcc = ({

 // src/containers/iso-base-media/parse-icc-profile.ts
 var parseIccProfile = (data) => {
-  const iterator = getArrayBufferIterator(data, Infinity);
+  const iterator = getArrayBufferIterator(data, data.length);
   const size = iterator.getUint32();
   if (size !== data.length) {
     throw new Error("Invalid ICC profile size");
@@ -10082,7 +10091,7 @@ var parseIccProfile = (data) => {
   for (const entry of entries) {
     const found = data.slice(entry.offset, entry.offset + entry.size);
     if (entry.tag === "rXYZ" || entry.tag === "gXYZ" || entry.tag === "bXYZ" || entry.tag === "wtpt") {
-      const it = getArrayBufferIterator(found, Infinity);
+      const it = getArrayBufferIterator(found, found.length);
       it.discard(4);
       const x = it.getInt32() / 65536;
       const y = it.getInt32() / 65536;
@@ -11508,7 +11517,7 @@ var getMoovAtom = async ({
 // src/containers/iso-base-media/mdat/calculate-jump-marks.ts
 var MAX_SPREAD_IN_SECONDS = 8;
 var getKey = (samplePositionTrack) => {
-  return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.dts}`;
+  return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}`;
 };
 var findBestJump = ({
   allSamplesSortedByOffset,
@@ -11601,7 +11610,7 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
   addFinalJumpIfNecessary();
   break;
 }
-const timestamp = currentSamplePosition.samplePosition.dts / currentSamplePosition.track.timescale;
+const timestamp = currentSamplePosition.samplePosition.decodingTimestamp / currentSamplePosition.track.originalTimescale;
 progresses[currentSamplePosition.track.trackId] = timestamp;
 const progressValues = Object.values(progresses);
 const maxProgress = Math.max(...progressValues);
@@ -11720,17 +11729,17 @@ var parseMdatSection = async (state) => {
   return makeFetchMoreData(samplesWithIndex.samplePosition.size - iterator.bytesRemaining());
 }
 const {
-  cts: rawCts,
-  dts: rawDts,
+  timestamp: rawCts,
+  decodingTimestamp: rawDts,
   duration: duration2,
   isKeyframe,
   offset,
   bigEndian,
   chunkSize
 } = samplesWithIndex.samplePosition;
-const { timescale, startInSeconds } = samplesWithIndex.track;
-const cts = rawCts + startInSeconds * timescale;
-const dts = rawDts + startInSeconds * timescale;
+const { originalTimescale, startInSeconds } = samplesWithIndex.track;
+const cts = rawCts + startInSeconds * originalTimescale;
+const dts = rawDts + startInSeconds * originalTimescale;
 const bytes = postprocessBytes({
   bytes: iterator.getSlice(samplesWithIndex.samplePosition.size),
   bigEndian,
@@ -11742,16 +11751,16 @@ var parseMdatSection = async (state) => {
     data: bytes,
     timestamp: cts,
     duration: duration2,
-    cts,
-    dts,
-    trackId: samplesWithIndex.track.trackId,
+    decodingTimestamp: dts,
     type: isKeyframe ? "key" : "delta",
-    offset,
-    timescale
+    offset
   },
-  timescale
+  timescale: originalTimescale
+});
+await state.callbacks.onAudioSample({
+  audioSample,
+  trackId: samplesWithIndex.track.trackId
 });
-await state.callbacks.onAudioSample(samplesWithIndex.track.trackId, audioSample);
 }
 if (samplesWithIndex.track.type === "video") {
   const nalUnitType = bytes[4] & 31;
@@ -11765,16 +11774,16 @@ var parseMdatSection = async (state) => {
     data: bytes,
     timestamp: cts,
     duration: duration2,
-    cts,
-    dts,
-    trackId: samplesWithIndex.track.trackId,
+    decodingTimestamp: dts,
    type: isKeyframe && !isRecoveryPoint ? "key" : "delta",
-    offset,
-    timescale
+    offset
   },
-  timescale
+  timescale: originalTimescale
+});
+await state.callbacks.onVideoSample({
+  videoSample,
+  trackId: samplesWithIndex.track.trackId
 });
-await state.callbacks.onVideoSample(samplesWithIndex.track.trackId, videoSample);
 }
 const jump = jumpMarks.find((j) => j.afterSampleWithOffset === offset);
 if (jump) {
@@ -12177,6 +12186,9 @@ var webReader = {
 };
 // src/parse-media.ts
 var parseMedia = (options) => {
+  if (!options) {
+    return Promise.reject(new Error("No options provided. See https://www.remotion.dev/media-parser for how to get started."));
+  }
   return internalParseMedia({
     fields: options.fields ?? null,
     logLevel: options.logLevel ?? "info",
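Calling parseMedia() with no argument now rejects with a pointer to the docs instead of failing on the first property access. Usage sketch:

    import {parseMedia} from '@remotion/media-parser';

    // @ts-expect-error deliberately wrong call, for illustration only
    parseMedia().catch((err: Error) => {
      // "No options provided. See https://www.remotion.dev/media-parser ..."
      console.log(err.message);
    });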
@@ -12244,7 +12256,7 @@ var considerSeekBasedOnChunk = async ({
   await callback(sample);
   return;
 }
-const timestamp = Math.min(sample.dts / sample.timescale, sample.cts / sample.timescale);
+const timestamp = Math.min(sample.decodingTimestamp / WEBCODECS_TIMESCALE, sample.timestamp / WEBCODECS_TIMESCALE);
 if (timestamp > pendingSeek.targetTime && chunkIndex !== null && chunkIndex > 0) {
   m3uState.setNextSeekShouldSubtractChunks(playlistUrl, subtractChunks + 1);
   parentController.seek(pendingSeek.targetTime);
@@ -12978,17 +12990,14 @@ var getAudioSampleFromCbr = ({
   const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
   const durationInSeconds = samplesPerFrame / sampleRate;
   const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
-  const timestamp = Math.round(timeInSeconds * 1e6);
-  const duration2 = Math.round(durationInSeconds * 1e6);
+  const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
+  const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
   const audioSample = {
     data,
-    cts: timestamp,
-    dts: timestamp,
+    decodingTimestamp: timestamp,
     duration: duration2,
     offset: initialOffset,
-    timescale: 1e6,
     timestamp,
-    trackId: 0,
     type: "key"
   };
   return { audioSample, timeInSeconds, durationInSeconds };
@@ -13025,17 +13034,14 @@ var getAudioSampleFromVbr = ({
   tableOfContents: info.xingData.tableOfContents
 });
 const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
-const timestamp = Math.round(timeInSeconds * 1e6);
-const duration2 = Math.round(durationInSeconds * 1e6);
+const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
+const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
 const audioSample = {
   data,
-  cts: timestamp,
-  dts: timestamp,
+  decodingTimestamp: timestamp,
   duration: duration2,
   offset: position,
-  timescale: 1e6,
   timestamp,
-  trackId: 0,
   type: "key"
 };
 return { timeInSeconds, audioSample, durationInSeconds };
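Both the CBR and VBR seek paths now stamp samples via the shared constant instead of the 1e6 literal, and drop the redundant cts/dts/trackId/timescale fields. A worked example with illustrative MPEG-1 Layer III numbers (1152 samples per frame):

    const WEBCODECS_TIMESCALE = 1e6;
    const sampleRate = 44_100;
    const samplesPerFrame = 1152;
    const nthFrame = 100;
    const timeInSeconds = (nthFrame * samplesPerFrame) / sampleRate; // ~2.612
    const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE); // 2_612_245
    const duration = Math.round((samplesPerFrame / sampleRate) * WEBCODECS_TIMESCALE); // 26_122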
@@ -13108,9 +13114,10 @@ var parseMpegHeader = async ({
   description: undefined,
   numberOfChannels,
   sampleRate,
-  timescale: 1e6,
+  originalTimescale: 1e6,
   trackId: 0,
-  startInSeconds: 0
+  startInSeconds: 0,
+  timescale: WEBCODECS_TIMESCALE
 },
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
 tracks: state.callbacks.tracks,
@@ -13147,7 +13154,10 @@ var parseMpegHeader = async ({
   offset: initialOffset,
   durationInSeconds
 });
-await state.callbacks.onAudioSample(0, audioSample);
+await state.callbacks.onAudioSample({
+  audioSample,
+  trackId: 0
+});
 };

 // src/containers/mp3/seek/wait-until-syncword.ts
@@ -13223,11 +13233,12 @@ var getStrhForIndex = (structure, trackId) => {
 // src/containers/riff/convert-queued-sample-to-mediaparser-sample.ts
 var getKeyFrameOffsetAndPocs = ({
   state,
-  sample
+  sample,
+  trackId
 }) => {
   if (sample.type === "key") {
     const sampleOffset = state.riff.sampleCounter.getSampleCountForTrack({
-      trackId: sample.trackId
+      trackId
     });
     return {
       sampleOffsetAtKeyframe: sampleOffset,
@@ -13239,7 +13250,7 @@ var getKeyFrameOffsetAndPocs = ({
   if (!keyframeAtOffset) {
     throw new Error("no keyframe at offset");
   }
-  const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[sample.trackId];
+  const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[trackId];
   const pocsAtKeyframeOffset = state.riff.sampleCounter.getPocAtKeyframeOffset({
     keyframeOffset: keyframeAtOffset.positionInBytes
   });
@@ -13248,12 +13259,17 @@ var getKeyFrameOffsetAndPocs = ({
     pocsAtKeyframeOffset
   };
 };
-var convertQueuedSampleToMediaParserSample = (sample, state) => {
-  const strh = getStrhForIndex(state.structure.getRiffStructure(), sample.trackId);
+var convertQueuedSampleToMediaParserSample = ({
+  sample,
+  state,
+  trackId
+}) => {
+  const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
   const samplesPerSecond = strh.rate / strh.scale;
   const { sampleOffsetAtKeyframe, pocsAtKeyframeOffset } = getKeyFrameOffsetAndPocs({
     sample,
-    state
+    state,
+    trackId
   });
   const indexOfPoc = pocsAtKeyframeOffset.findIndex((poc) => poc === sample.avc?.poc);
   if (indexOfPoc === -1) {
@@ -13265,8 +13281,7 @@ var convertQueuedSampleToMediaParserSample = ({
   sample: {
     ...sample,
     timestamp,
-    cts: timestamp,
-    dts: timestamp
+    decodingTimestamp: timestamp
   },
   timescale: 1
 });
@@ -13676,10 +13691,8 @@ var handleChunk = async ({
   const rawSample = {
     data,
     duration: 1 / samplesPerSecond,
-    trackId,
     type: keyOrDelta === "bidirectional" ? "delta" : keyOrDelta,
     offset,
-    timescale: samplesPerSecond,
     avc: info
   };
   const maxFramesInBuffer = state.avc.getMaxFramesInBuffer();
@@ -13696,14 +13709,29 @@ var handleChunk = async ({
     poc: info.poc
   });
 }
-state.riff.queuedBFrames.addFrame(rawSample, maxFramesInBuffer);
+state.riff.queuedBFrames.addFrame({
+  frame: rawSample,
+  trackId,
+  maxFramesInBuffer,
+  timescale: samplesPerSecond
+});
 const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
 if (!releasedFrame) {
   return;
 }
-const videoSample = convertQueuedSampleToMediaParserSample(releasedFrame, state);
-state.riff.sampleCounter.onVideoSample(videoSample);
-await state.callbacks.onVideoSample(trackId, videoSample);
+const videoSample = convertQueuedSampleToMediaParserSample({
+  sample: releasedFrame.sample,
+  state,
+  trackId: releasedFrame.trackId
+});
+state.riff.sampleCounter.onVideoSample({
+  trackId,
+  videoSample
+});
+await state.callbacks.onVideoSample({
+  videoSample,
+  trackId
+});
 }
 const audioChunk = ckId.match(/^([0-9]{2})wb$/);
 if (audioChunk) {
@@ -13718,24 +13746,21 @@
   trackId
 });
 const timeInSec = nthSample / samplesPerSecond;
-const timestamp = timeInSec;
+const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE);
 const data = iterator.getSlice(ckSize);
-const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
-  sample: {
-    cts: timestamp,
-    dts: timestamp,
-    data,
-    duration: undefined,
-    timestamp,
-    trackId,
-    type: "key",
-    offset,
-    timescale: samplesPerSecond
-  },
-  timescale: 1
-});
+const audioSample = {
+  decodingTimestamp: timestamp,
+  data,
+  duration: undefined,
+  timestamp,
+  type: "key",
+  offset
+};
 state.riff.sampleCounter.onAudioSample(trackId, audioSample);
-await state.callbacks.onAudioSample(trackId, audioSample);
+await state.callbacks.onAudioSample({
+  audioSample,
+  trackId
+});
 }
 };
 var parseMovi = async ({
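RIFF audio timestamps were previously expressed in seconds and pushed through convertAudioOrVideoSampleToWebCodecsTimestamps with timescale: 1; they are now computed directly in microseconds and the converter call disappears. Worked example (values illustrative):

    const WEBCODECS_TIMESCALE = 1e6;
    const samplesPerSecond = 48_000; // strh.rate / strh.scale
    const nthSample = 96_000;
    const timeInSec = nthSample / samplesPerSecond; // 2
    const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE); // 2_000_000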
@@ -13778,9 +13803,19 @@ var parseMediaSection = async (state) => {
 var parseRiffBody = async (state) => {
   const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
   if (releasedFrame) {
-    const converted = convertQueuedSampleToMediaParserSample(releasedFrame, state);
-    state.riff.sampleCounter.onVideoSample(converted);
-    await state.callbacks.onVideoSample(releasedFrame.trackId, converted);
+    const converted = convertQueuedSampleToMediaParserSample({
+      sample: releasedFrame.sample,
+      state,
+      trackId: releasedFrame.trackId
+    });
+    state.riff.sampleCounter.onVideoSample({
+      trackId: releasedFrame.trackId,
+      videoSample: converted
+    });
+    await state.callbacks.onVideoSample({
+      videoSample: converted,
+      trackId: releasedFrame.trackId
+    });
     return null;
   }
   if (state.mediaSection.isCurrentByteInMediaSection(state.iterator) === "in-section") {
@@ -14158,13 +14193,14 @@ var handleAacPacket = async ({
   type: "audio",
   codecData: { type: "aac-config", data: codecPrivate2 },
   trackId: programId,
-  timescale: MPEG_TIMESCALE,
+  originalTimescale: MPEG_TIMESCALE,
   codecEnum: "aac",
   codec: mapAudioObjectTypeToCodecString(audioObjectType),
   description: codecPrivate2,
   numberOfChannels: channelConfiguration,
   sampleRate,
-  startInSeconds: 0
+  startInSeconds: 0,
+  timescale: WEBCODECS_TIMESCALE
 };
 await registerAudioTrack({
   track,
@@ -14176,21 +14212,21 @@ var handleAacPacket = async ({
 });
 }
 const sample = {
-  cts: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
-  dts: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
+  decodingTimestamp: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
   timestamp: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
   duration: undefined,
   data: streamBuffer.getBuffer(),
-  trackId: programId,
   type: "key",
-  offset,
-  timescale: MPEG_TIMESCALE
+  offset
 };
 const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
   sample,
   timescale: MPEG_TIMESCALE
 });
-await sampleCallbacks.onAudioSample(programId, audioSample);
+await sampleCallbacks.onAudioSample({
+  audioSample,
+  trackId: programId
+});
 transportStream.lastEmittedSample.setLastEmittedSample(sample);
 };

@@ -14681,9 +14717,10 @@ var parseFmt = async ({
   codecEnum: format,
   numberOfChannels,
   sampleRate,
-  timescale: 1e6,
+  originalTimescale: 1e6,
   trackId: 0,
-  startInSeconds: 0
+  startInSeconds: 0,
+  timescale: WEBCODECS_TIMESCALE
 },
 container: "wav",
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
@@ -14783,19 +14820,19 @@ var parseMediaSection2 = async ({
 const data = iterator.getSlice(toRead);
 const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
   sample: {
-    cts: timestamp,
-    dts: timestamp,
+    decodingTimestamp: timestamp,
     data,
     duration: duration2,
     timestamp,
-    trackId: 0,
     type: "key",
-    offset,
-    timescale: 1e6
+    offset
   },
   timescale: 1
 });
-await state.callbacks.onAudioSample(0, audioSample);
+await state.callbacks.onAudioSample({
+  audioSample,
+  trackId: 0
+});
 return null;
 };

@@ -14968,19 +15005,18 @@ var getSampleFromBlock = async ({
 if (codec.startsWith("V_")) {
   const partialVideoSample = {
     data: iterator.getSlice(remainingNow),
-    cts: timecodeInMicroseconds,
-    dts: timecodeInMicroseconds,
+    decodingTimestamp: timecodeInMicroseconds,
     duration: undefined,
-    trackId: trackNumber2,
     timestamp: timecodeInMicroseconds,
-    offset,
-    timescale
+    offset
   };
   if (keyframe === null) {
     iterator.destroy();
     return {
       type: "partial-video-sample",
-      partialVideoSample
+      partialVideoSample,
+      trackId: trackNumber2,
+      timescale: WEBCODECS_TIMESCALE
     };
   }
   await addAvcToTrackAndActivateTrackIfNecessary({
@@ -15001,25 +15037,26 @@ var getSampleFromBlock = async ({
   iterator.destroy();
   return {
     type: "video-sample",
-    videoSample: sample
+    videoSample: sample,
+    trackId: trackNumber2,
+    timescale: WEBCODECS_TIMESCALE
   };
 }
 if (codec.startsWith("A_")) {
   const audioSample = {
     data: iterator.getSlice(remainingNow),
-    trackId: trackNumber2,
     timestamp: timecodeInMicroseconds,
     type: "key",
     duration: undefined,
-    cts: timecodeInMicroseconds,
-    dts: timecodeInMicroseconds,
-    offset,
-    timescale
+    decodingTimestamp: timecodeInMicroseconds,
+    offset
   };
   iterator.destroy();
   return {
     type: "audio-sample",
-    audioSample
+    audioSample,
+    trackId: trackNumber2,
+    timescale: WEBCODECS_TIMESCALE
   };
 }
 iterator.destroy();
@@ -15180,7 +15217,10 @@ var postprocessEbml = async ({
   avcState
 });
 if (sample.type === "video-sample") {
-  await callbacks.onVideoSample(sample.videoSample.trackId, sample.videoSample);
+  await callbacks.onVideoSample({
+    videoSample: sample.videoSample,
+    trackId: sample.trackId
+  });
   return {
     type: "Block",
     value: new Uint8Array([]),
@@ -15188,7 +15228,10 @@ var postprocessEbml = async ({
   };
 }
 if (sample.type === "audio-sample") {
-  await callbacks.onAudioSample(sample.audioSample.trackId, sample.audioSample);
+  await callbacks.onAudioSample({
+    audioSample: sample.audioSample,
+    trackId: sample.trackId
+  });
   return {
     type: "Block",
     value: new Uint8Array([]),
@@ -15224,7 +15267,10 @@ var postprocessEbml = async ({
   ...sample.partialVideoSample,
   type: hasReferenceBlock ? "delta" : "key"
 };
-await callbacks.onVideoSample(sample.partialVideoSample.trackId, completeFrame);
+await callbacks.onVideoSample({
+  videoSample: completeFrame,
+  trackId: sample.trackId
+});
 }
 return {
   type: "BlockGroup",
@@ -16277,7 +16323,7 @@ var sampleSorter = ({
   if (!callback) {
     throw new Error("No callback found for audio sample");
   }
-  latestSample[src] = sample.dts;
+  latestSample[src] = sample.decodingTimestamp;
   await callback(sample);
 },
 addVideoSample: async (src, sample) => {
@@ -16285,7 +16331,7 @@ var sampleSorter = ({
   if (!callback) {
     throw new Error("No callback found for video sample.");
   }
-  latestSample[src] = sample.dts;
+  latestSample[src] = sample.decodingTimestamp;
   await callback(sample);
 },
 getNextStreamToRun: (streams) => {
@@ -16780,7 +16826,10 @@ var fetchIdx1 = async ({
   logLevel,
   prefetchCache
 });
-const iterator = getArrayBufferIterator(new Uint8Array, Infinity);
+if (result.contentLength === null) {
+  throw new Error("Content length is null");
+}
+const iterator = getArrayBufferIterator(new Uint8Array, result.contentLength - position + 1);
 while (true) {
   const res = await result.reader.reader.read();
   if (res.value) {
@@ -16880,13 +16929,18 @@ var queuedBFramesState = () => {
   queuedFrames.length = 0;
 };
 return {
-  addFrame: (frame, maxFramesInBuffer) => {
+  addFrame: ({
+    frame,
+    maxFramesInBuffer,
+    trackId,
+    timescale
+  }) => {
     if (frame.type === "key") {
       flush();
-      releasedFrames.push(frame);
+      releasedFrames.push({ sample: frame, trackId, timescale });
       return;
     }
-    queuedFrames.push(frame);
+    queuedFrames.push({ sample: frame, trackId, timescale });
     if (queuedFrames.length > maxFramesInBuffer) {
       releasedFrames.push(queuedFrames.shift());
     }
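Queued B-frames now travel as { sample, trackId, timescale } objects, so a released frame still knows its track after trackId left the sample itself. A hedged restatement of the queue semantics visible here; the flush() body is truncated above and is assumed to drain the queue into releasedFrames before clearing it:

    type Frame = {type: 'key' | 'delta'};
    type Entry = {sample: Frame; trackId: number; timescale: number};

    const queuedFrames: Entry[] = [];
    const releasedFrames: Entry[] = [];

    const addFrame = (entry: Entry, maxFramesInBuffer: number): void => {
      if (entry.sample.type === 'key') {
        // a keyframe flushes everything queued, then is released itself
        releasedFrames.push(...queuedFrames.splice(0), entry);
        return;
      }
      queuedFrames.push(entry);
      if (queuedFrames.length > maxFramesInBuffer) {
        releasedFrames.push(queuedFrames.shift()!); // overflow: release the oldest
      }
    };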
@@ -16947,22 +17001,25 @@ var riffSampleCounter = () => {
   }
   samplesForTrack[trackId]++;
 };
-const onVideoSample = (videoSample) => {
-  if (typeof samplesForTrack[videoSample.trackId] === "undefined") {
-    samplesForTrack[videoSample.trackId] = 0;
+const onVideoSample = ({
+  trackId,
+  videoSample
+}) => {
+  if (typeof samplesForTrack[trackId] === "undefined") {
+    samplesForTrack[trackId] = 0;
   }
   if (videoSample.type === "key") {
     riffKeys.addKeyframe({
-      trackId: videoSample.trackId,
-      decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
+      trackId,
+      decodingTimeInSeconds: videoSample.decodingTimestamp / WEBCODECS_TIMESCALE,
       positionInBytes: videoSample.offset,
-      presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
+      presentationTimeInSeconds: videoSample.timestamp / WEBCODECS_TIMESCALE,
       sizeInBytes: videoSample.data.length,
       sampleCounts: { ...samplesForTrack }
     });
   }
   if (videoSample.data.length > 0) {
-    samplesForTrack[videoSample.trackId]++;
+    samplesForTrack[trackId]++;
   }
 };
 const getSampleCountForTrack = ({ trackId }) => {
@@ -17094,7 +17151,10 @@ var callbacksState = ({
   }
   queuedVideoSamples[id] = [];
 },
-onAudioSample: async (trackId, audioSample) => {
+onAudioSample: async ({
+  audioSample,
+  trackId
+}) => {
   if (controller._internals.signal.aborted) {
     throw new Error("Aborted");
   }
@@ -17112,7 +17172,10 @@ var callbacksState = ({
     samplesObserved.addAudioSample(audioSample);
   }
 },
-onVideoSample: async (trackId, videoSample) => {
+onVideoSample: async ({
+  trackId,
+  videoSample
+}) => {
   if (controller._internals.signal.aborted) {
     throw new Error("Aborted");
   }
@@ -17129,9 +17192,9 @@ var callbacksState = ({
   if (videoSample.type === "key") {
     keyframes.addKeyframe({
       trackId,
-      decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
+      decodingTimeInSeconds: videoSample.decodingTimestamp / WEBCODECS_TIMESCALE,
       positionInBytes: videoSample.offset,
-      presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
+      presentationTimeInSeconds: videoSample.timestamp / WEBCODECS_TIMESCALE,
       sizeInBytes: videoSample.data.length
     });
   }
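Both sample callbacks switch from positional (trackId, sample) arguments to a single options object, matching the samples themselves no longer carrying trackId. A sketch of the shape this diff implies (field names taken from this bundle; the real sample type has more fields):

    type MediaParserSample = {
      data: Uint8Array;
      timestamp: number;         // presentation time, in WEBCODECS_TIMESCALE units
      decodingTimestamp: number; // formerly `dts`; `cts` is now `timestamp`
      duration: number | undefined;
      type: 'key' | 'delta';
      offset: number;
    };

    // before: onVideoSample(trackId, videoSample); after:
    declare const onVideoSample: (args: {
      videoSample: MediaParserSample;
      trackId: number;
    }) => Promise<void>;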
@@ -17186,9 +17249,9 @@ var samplesObservedState = () => {
   return largestSample - smallestSample;
 };
 const addVideoSample = (videoSample) => {
-  videoSamples.set(videoSample.cts, videoSample.data.byteLength);
-  const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
-  const duration2 = (videoSample.duration ?? 0) / videoSample.timescale;
+  videoSamples.set(videoSample.timestamp, videoSample.data.byteLength);
+  const presentationTimeInSeconds = videoSample.timestamp / WEBCODECS_TIMESCALE;
+  const duration2 = (videoSample.duration ?? 0) / WEBCODECS_TIMESCALE;
   if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
     largestVideoSample = presentationTimeInSeconds + duration2;
   }
@@ -17197,9 +17260,9 @@ var samplesObservedState = () => {
   }
 };
 const addAudioSample = (audioSample) => {
-  audioSamples.set(audioSample.cts, audioSample.data.byteLength);
-  const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
-  const duration2 = (audioSample.duration ?? 0) / audioSample.timescale;
+  audioSamples.set(audioSample.timestamp, audioSample.data.byteLength);
+  const presentationTimeInSeconds = audioSample.timestamp / WEBCODECS_TIMESCALE;
+  const duration2 = (audioSample.duration ?? 0) / WEBCODECS_TIMESCALE;
   if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
     largestAudioSample = presentationTimeInSeconds + duration2;
   }
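With per-sample timescales gone, the observed-duration bookkeeping divides by the constant. Worked example:

    const WEBCODECS_TIMESCALE = 1e6;
    const sample = {timestamp: 3_000_000, duration: 33_333}; // microseconds
    const presentationTimeInSeconds = sample.timestamp / WEBCODECS_TIMESCALE; // 3
    const largest = presentationTimeInSeconds + sample.duration / WEBCODECS_TIMESCALE;
    // ~3.0333 s; feeds the slow duration/fps computation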
@@ -17567,6 +17630,9 @@ var internalParseMedia = async function({
   seekingHints,
   ...more
 }) {
+  if (!src) {
+    throw new Error('No "src" provided');
+  }
   controller._internals.markAsReadyToEmitEvents();
   warnIfRemotionLicenseNotAcknowledged({
     acknowledgeRemotionLicense,
@@ -17779,7 +17845,7 @@ var post = (message) => {
 };
 var controller = mediaParserController();
 var executeCallback = (payload) => {
-  const nonce = crypto.randomUUID();
+  const nonce = String(Math.random());
   const { promise, resolve, reject } = withResolvers();
   const cb = (msg) => {
     const data = msg.data;
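The worker RPC nonce drops crypto.randomUUID(), likely because that API is unavailable outside secure contexts (the diff does not state the motivation). The nonce only correlates a request with its response message, so it needs no cryptographic strength. Sketch with a hypothetical message shape:

    const nonce = String(Math.random());
    postMessage({type: 'request', nonce}); // the worker echoes the nonce back
    // executeCallback resolves the matching promise when a response with the
    // same nonce arrives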