@remotion/media-parser 4.0.303 → 4.0.305

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dist/containers/aac/parse-aac.js +8 -6
  2. package/dist/containers/flac/parse-flac-frame.js +6 -6
  3. package/dist/containers/flac/parse-streaminfo.js +3 -1
  4. package/dist/containers/iso-base-media/find-keyframe-before-time.js +2 -2
  5. package/dist/containers/iso-base-media/get-keyframes.js +3 -3
  6. package/dist/containers/iso-base-media/get-sample-position-bounds.js +2 -2
  7. package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +3 -3
  8. package/dist/containers/iso-base-media/get-seeking-byte.js +1 -1
  9. package/dist/containers/iso-base-media/make-track.js +7 -3
  10. package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +3 -3
  11. package/dist/containers/iso-base-media/mdat/mdat.js +16 -16
  12. package/dist/containers/iso-base-media/parse-icc-profile.js +2 -2
  13. package/dist/containers/m3u/first-sample-in-m3u-chunk.js +2 -1
  14. package/dist/containers/m3u/sample-sorter.js +2 -2
  15. package/dist/containers/mp3/parse-mpeg-header.js +7 -2
  16. package/dist/containers/mp3/seek/audio-sample-from-cbr.js +4 -6
  17. package/dist/containers/mp3/seek/audio-sample-from-vbr.js +4 -6
  18. package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +6 -5
  19. package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +7 -7
  20. package/dist/containers/riff/get-tracks-from-avi.js +5 -2
  21. package/dist/containers/riff/parse-movi.js +35 -25
  22. package/dist/containers/riff/parse-riff-body.js +13 -3
  23. package/dist/containers/riff/seek/fetch-idx1.js +4 -1
  24. package/dist/containers/transport-stream/handle-aac-packet.js +8 -7
  25. package/dist/containers/transport-stream/handle-avc-packet.js +8 -7
  26. package/dist/containers/wav/parse-fmt.js +3 -1
  27. package/dist/containers/wav/parse-media-section.js +5 -5
  28. package/dist/containers/webm/get-sample-from-block.d.ts +6 -0
  29. package/dist/containers/webm/get-sample-from-block.js +9 -8
  30. package/dist/containers/webm/make-track.js +5 -2
  31. package/dist/containers/webm/parse-ebml.js +12 -3
  32. package/dist/containers/webm/seek/seeking-hints.js +1 -1
  33. package/dist/convert-audio-or-video-sample.js +6 -9
  34. package/dist/download-and-parse-media.js +3 -0
  35. package/dist/esm/index.mjs +266 -196
  36. package/dist/esm/worker-server-entry.mjs +262 -196
  37. package/dist/esm/worker-web-entry.mjs +262 -196
  38. package/dist/get-duration.js +2 -2
  39. package/dist/get-sample-positions-from-mp4.js +2 -2
  40. package/dist/get-sample-positions.d.ts +2 -2
  41. package/dist/get-sample-positions.js +2 -2
  42. package/dist/get-tracks.d.ts +6 -3
  43. package/dist/index.d.ts +24 -5
  44. package/dist/index.js +3 -1
  45. package/dist/internal-parse-media.js +3 -0
  46. package/dist/iterator/buffer-manager.js +1 -1
  47. package/dist/parse-media.js +3 -0
  48. package/dist/samples-from-moof.js +2 -2
  49. package/dist/state/iso-base-media/cached-sample-positions.d.ts +2 -2
  50. package/dist/state/parser-state.d.ts +23 -5
  51. package/dist/state/riff/queued-frames.d.ts +14 -3
  52. package/dist/state/riff/queued-frames.js +3 -3
  53. package/dist/state/riff/sample-counter.d.ts +4 -1
  54. package/dist/state/riff/sample-counter.js +8 -7
  55. package/dist/state/riff.d.ts +15 -3
  56. package/dist/state/sample-callbacks.d.ts +8 -2
  57. package/dist/state/sample-callbacks.js +5 -4
  58. package/dist/state/samples-observed/slow-duration-fps.js +7 -6
  59. package/dist/version.d.ts +1 -1
  60. package/dist/version.js +1 -1
  61. package/dist/webcodec-sample-types.d.ts +3 -9
  62. package/dist/webcodecs-timescale.d.ts +1 -0
  63. package/dist/webcodecs-timescale.js +4 -0
  64. package/dist/worker-server.js +2 -1
  65. package/package.json +3 -3
@@ -1170,7 +1170,7 @@ var bufferManager = ({
  counter
  }) => {
  const buf = new ArrayBuffer(initialData.byteLength, {
- maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 32)
+ maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 31)
  });
  if (!buf.resize) {
  throw new Error("`ArrayBuffer.resize` is not supported in this Runtime. On the server: Use at least Node.js 20 or Bun. In the browser: Chrome 111, Edge 111, Safari 16.4, Firefox 128, Opera 111");
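The hunk above lowers the `maxByteLength` clamp for the iterator's resizable `ArrayBuffer` from 2 ** 32 to 2 ** 31. A minimal sketch of the pattern, assuming a hypothetical caller-supplied `maxBytes` value:

```ts
// Sketch: resizable ArrayBuffers cap maxByteLength at an implementation-defined
// limit, so the requested maximum is clamped (to 2 ** 31 as of this patch).
const maxBytes = 8 * 2 ** 30; // hypothetical caller-supplied limit
const buf = new ArrayBuffer(1024, {
  maxByteLength: Math.min(maxBytes, 2 ** 31),
});
buf.resize(2048); // grows in place, up to maxByteLength
```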
@@ -2338,6 +2338,9 @@ var findTrackStartTimeInSeconds = ({
  return dwellTime / movieTimeScale;
  };

+ // src/webcodecs-timescale.ts
+ var WEBCODECS_TIMESCALE = 1e6;
+
  // src/containers/riff/timescale.ts
  var MEDIA_PARSER_RIFF_TIMESCALE = 1e6;

@@ -2390,9 +2393,10 @@ var makeAviAudioTrack = ({
  description: new Uint8Array([18, 16]),
  numberOfChannels: strf.numberOfChannels,
  sampleRate: strf.sampleRate,
- timescale: MEDIA_PARSER_RIFF_TIMESCALE,
+ originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
  trackId: index,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  };
  var makeAviVideoTrack = ({
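The hunk at 2338 adds `WEBCODECS_TIMESCALE` (1e6, i.e. microseconds), and the hunk above shows the change that then repeats across every track factory in this diff: the container-native value formerly published as `timescale` is renamed to `originalTimescale`, and `timescale` now always carries the WebCodecs microsecond scale. A sketch of the resulting timing fields, with field names taken from the diff (the type alias itself is hypothetical):

```ts
const WEBCODECS_TIMESCALE = 1e6; // microseconds, as added above

// Hypothetical alias describing what each track factory now returns.
type TrackTimingFields = {
  originalTimescale: number; // container timescale, e.g. MEDIA_PARSER_RIFF_TIMESCALE
  timescale: typeof WEBCODECS_TIMESCALE; // always 1e6 from this release on
  startInSeconds: number;
};
```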
@@ -2413,7 +2417,7 @@ var makeAviVideoTrack = ({
  height: strf.height,
  type: "video",
  displayAspectHeight: strf.height,
- timescale: MEDIA_PARSER_RIFF_TIMESCALE,
+ originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
  description: undefined,
  m3uStreamFormat: null,
  trackId: index,
@@ -2436,7 +2440,8 @@ var makeAviVideoTrack = ({
  denominator: 1
  },
  fps: strh.rate / strh.scale,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  };
  var getTracksFromAvi = (structure, state) => {
@@ -3210,7 +3215,7 @@ var getTrack = ({
  numerator: 1,
  denominator: 1
  },
- timescale,
+ originalTimescale: timescale,
  codedHeight: height.value.value,
  codedWidth: width.value.value,
  displayAspectHeight: displayHeight2 ? displayHeight2.value.value : height.value.value,
@@ -3221,7 +3226,8 @@ var getTrack = ({
  advancedColor,
  codecEnum,
  fps: null,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  if (trackTypeToString(trackType2.value.value) === "audio") {
@@ -3236,7 +3242,7 @@ var getTrack = ({
  type: "audio",
  trackId,
  codec: codecString,
- timescale,
+ originalTimescale: timescale,
  numberOfChannels,
  sampleRate,
  description: getAudioDescription(track),
@@ -3244,7 +3250,8 @@ var getTrack = ({
  codecEnum: getMatroskaAudioCodecEnum({
  track
  }),
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  return null;
@@ -4143,23 +4150,25 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
  return {
  type: "audio",
  trackId: tkhdBox.trackId,
- timescale: timescaleAndDuration.timescale,
+ originalTimescale: timescaleAndDuration.timescale,
  codec: codecString,
  numberOfChannels: actual.numberOfChannels,
  sampleRate: actual.sampleRate,
  description: actual.codecPrivate?.data ?? undefined,
  codecData: actual.codecPrivate,
  codecEnum,
- startInSeconds: startTimeInSeconds
+ startInSeconds: startTimeInSeconds,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  if (!trakBoxContainsVideo(trakBox)) {
  return {
  type: "other",
  trackId: tkhdBox.trackId,
- timescale: timescaleAndDuration.timescale,
+ originalTimescale: timescaleAndDuration.timescale,
  trakBox,
- startInSeconds: startTimeInSeconds
+ startInSeconds: startTimeInSeconds,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  const videoSample = getStsdVideoConfig(trakBox);
@@ -4192,7 +4201,7 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
  type: "video",
  trackId: tkhdBox.trackId,
  description: videoDescriptors ?? undefined,
- timescale: timescaleAndDuration.timescale,
+ originalTimescale: timescaleAndDuration.timescale,
  codec,
  sampleAspectRatio: getSampleAspectRatio(trakBox),
  width,
@@ -4207,7 +4216,8 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
  advancedColor,
  codecEnum: getVideoCodecFromIsoTrak(trakBox),
  fps: getFpsFromMp4TrakBox(trakBox),
- startInSeconds: startTimeInSeconds
+ startInSeconds: startTimeInSeconds,
+ timescale: WEBCODECS_TIMESCALE
  };
  return track;
  };
@@ -4523,7 +4533,7 @@ var parseAvcc = ({

  // src/containers/iso-base-media/parse-icc-profile.ts
  var parseIccProfile = (data) => {
- const iterator = getArrayBufferIterator(data, Infinity);
+ const iterator = getArrayBufferIterator(data, data.length);
  const size = iterator.getUint32();
  if (size !== data.length) {
  throw new Error("Invalid ICC profile size");
@@ -4568,7 +4578,7 @@ var parseIccProfile = (data) => {
  for (const entry of entries) {
  const found = data.slice(entry.offset, entry.offset + entry.size);
  if (entry.tag === "rXYZ" || entry.tag === "gXYZ" || entry.tag === "bXYZ" || entry.tag === "wtpt") {
- const it = getArrayBufferIterator(found, Infinity);
+ const it = getArrayBufferIterator(found, found.length);
  it.discard(4);
  const x = it.getInt32() / 65536;
  const y = it.getInt32() / 65536;
@@ -6338,19 +6348,18 @@ var getSampleFromBlock = async ({
  if (codec.startsWith("V_")) {
  const partialVideoSample = {
  data: iterator.getSlice(remainingNow),
- cts: timecodeInMicroseconds,
- dts: timecodeInMicroseconds,
+ decodingTimestamp: timecodeInMicroseconds,
  duration: undefined,
- trackId: trackNumber2,
  timestamp: timecodeInMicroseconds,
- offset,
- timescale
+ offset
  };
  if (keyframe === null) {
  iterator.destroy();
  return {
  type: "partial-video-sample",
- partialVideoSample
+ partialVideoSample,
+ trackId: trackNumber2,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  await addAvcToTrackAndActivateTrackIfNecessary({
@@ -6371,25 +6380,26 @@ var getSampleFromBlock = async ({
  iterator.destroy();
  return {
  type: "video-sample",
- videoSample: sample
+ videoSample: sample,
+ trackId: trackNumber2,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  if (codec.startsWith("A_")) {
  const audioSample = {
  data: iterator.getSlice(remainingNow),
- trackId: trackNumber2,
  timestamp: timecodeInMicroseconds,
  type: "key",
  duration: undefined,
- cts: timecodeInMicroseconds,
- dts: timecodeInMicroseconds,
- offset,
- timescale
+ decodingTimestamp: timecodeInMicroseconds,
+ offset
  };
  iterator.destroy();
  return {
  type: "audio-sample",
- audioSample
+ audioSample,
+ trackId: trackNumber2,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  iterator.destroy();
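The two hunks above also reshape the samples themselves: `cts`/`dts` become `timestamp`/`decodingTimestamp`, while `trackId` and `timescale` move off the sample and onto the enclosing result. A sketch of the envelope `getSampleFromBlock` now returns, inferred from the added lines (the type names are hypothetical):

```ts
// Inferred from the diff: trackId and timescale ride on the envelope,
// no longer on the sample object itself.
type MediaSample = {
  data: Uint8Array;
  timestamp: number; // was `cts`
  decodingTimestamp: number; // was `dts`
  duration: number | undefined;
  offset: number;
  type: 'key' | 'delta';
};

type SampleFromBlock =
  | {type: 'video-sample'; videoSample: MediaSample; trackId: number; timescale: number}
  | {type: 'audio-sample'; audioSample: MediaSample; trackId: number; timescale: number}
  | {type: 'partial-video-sample'; partialVideoSample: Omit<MediaSample, 'type'>; trackId: number; timescale: number};
```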
@@ -6550,7 +6560,10 @@ var postprocessEbml = async ({
  avcState
  });
  if (sample.type === "video-sample") {
- await callbacks.onVideoSample(sample.videoSample.trackId, sample.videoSample);
+ await callbacks.onVideoSample({
+ videoSample: sample.videoSample,
+ trackId: sample.trackId
+ });
  return {
  type: "Block",
  value: new Uint8Array([]),
@@ -6558,7 +6571,10 @@ var postprocessEbml = async ({
  };
  }
  if (sample.type === "audio-sample") {
- await callbacks.onAudioSample(sample.audioSample.trackId, sample.audioSample);
+ await callbacks.onAudioSample({
+ audioSample: sample.audioSample,
+ trackId: sample.trackId
+ });
  return {
  type: "Block",
  value: new Uint8Array([]),
@@ -6594,7 +6610,10 @@ var postprocessEbml = async ({
  ...sample.partialVideoSample,
  type: hasReferenceBlock ? "delta" : "key"
  };
- await callbacks.onVideoSample(sample.partialVideoSample.trackId, completeFrame);
+ await callbacks.onVideoSample({
+ videoSample: completeFrame,
+ trackId: sample.trackId
+ });
  }
  return {
  type: "BlockGroup",
@@ -7090,8 +7109,8 @@ var getSamplesFromTraf = (trafSegment, moofOffset) => {
  const dts = time + (tfdtBox?.baseMediaDecodeTime ?? 0);
  const samplePosition = {
  offset: offset + (moofOffset ?? 0) + (dataOffset ?? 0),
- dts,
- cts: dts + (sample.sampleCompositionTimeOffset ?? 0),
+ decodingTimestamp: dts,
+ timestamp: dts + (sample.sampleCompositionTimeOffset ?? 0),
  duration: duration2,
  isKeyframe: keyframe,
  size,
@@ -7182,8 +7201,8 @@ var getSamplePositions = ({
  offset: Number(chunks[i]) + offsetInThisChunk,
  size,
  isKeyframe,
- dts,
- cts,
+ decodingTimestamp: dts,
+ timestamp: cts,
  duration: delta,
  chunk: i,
  bigEndian: false,
@@ -7232,8 +7251,8 @@ var getGroupedSamplesPositionsFromMp4 = ({
  }
  samples.push({
  chunk,
- cts: timestamp,
- dts: timestamp,
+ timestamp,
+ decodingTimestamp: timestamp,
  offset: Number(entry),
  size: stszBox.sampleSize * samplesPerChunk,
  duration: samplesPerChunk,
@@ -7593,7 +7612,7 @@ var getDurationFromIsoBaseMedia = (parserState) => {
  }
  const tracks2 = getTracks(parserState, true);
  const allSamples = tracks2.map((t) => {
- const { timescale: ts } = t;
+ const { originalTimescale: ts } = t;
  const trakBox = getTrakBoxByTrackId(moovBox, t.trackId);
  if (!trakBox) {
  return null;
@@ -7609,7 +7628,7 @@ var getDurationFromIsoBaseMedia = (parserState) => {
  if (samplePositions.length === 0) {
  return null;
  }
- const highest = samplePositions?.map((sp) => (sp.cts + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
+ const highest = samplePositions?.map((sp) => (sp.timestamp + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
  return highest ?? 0;
  });
  if (allSamples.every((s) => s === null)) {
@@ -7696,7 +7715,7 @@ var getKeyframesFromIsoBaseMedia = (state) => {
  return [];
  }
  const allSamples = videoTracks.map((t) => {
- const { timescale: ts } = t;
+ const { originalTimescale: ts } = t;
  const trakBox = getTrakBoxByTrackId(moov, t.trackId);
  if (!trakBox) {
  return [];
@@ -7717,8 +7736,8 @@ var getKeyframesFromIsoBaseMedia = (state) => {
  }).map((k) => {
  return {
  trackId: t.trackId,
- presentationTimeInSeconds: k.cts / ts,
- decodingTimeInSeconds: k.dts / ts,
+ presentationTimeInSeconds: k.timestamp / ts,
+ decodingTimeInSeconds: k.decodingTimestamp / ts,
  positionInBytes: k.offset,
  sizeInBytes: k.size
  };
@@ -8127,8 +8146,8 @@ var findKeyframeBeforeTime = ({
  let videoByte = 0;
  let videoSample = null;
  for (const sample of samplePositions) {
- const ctsInSeconds = sample.cts / timescale + startInSeconds;
- const dtsInSeconds = sample.dts / timescale + startInSeconds;
+ const ctsInSeconds = sample.timestamp / timescale + startInSeconds;
+ const dtsInSeconds = sample.decodingTimestamp / timescale + startInSeconds;
  if (!sample.isKeyframe) {
  continue;
  }
@@ -8280,8 +8299,8 @@ var getSamplePositionBounds = (samplePositions, timescale) => {
  let min = Infinity;
  let max = -Infinity;
  for (const samplePosition of samplePositions) {
- const timestampMin = Math.min(samplePosition.cts, samplePosition.dts);
- const timestampMax = Math.max(samplePosition.cts, samplePosition.dts) + (samplePosition.duration ?? 0);
+ const timestampMin = Math.min(samplePosition.timestamp, samplePosition.decodingTimestamp);
+ const timestampMax = Math.max(samplePosition.timestamp, samplePosition.decodingTimestamp) + (samplePosition.duration ?? 0);
  if (timestampMin < min) {
  min = timestampMin;
  }
@@ -8365,13 +8384,13 @@ var getSeekingByteFromFragmentedMp4 = async ({
  });
  Log.trace(logLevel, "Fragmented MP4 - Checking if we have seeking info for this time range");
  for (const positions of samplePositionsArray) {
- const { min, max } = getSamplePositionBounds(positions.samples, firstTrack.timescale);
+ const { min, max } = getSamplePositionBounds(positions.samples, firstTrack.originalTimescale);
  if (min <= time && (positions.isLastFragment || isLastChunkInPlaylist || time <= max)) {
  Log.trace(logLevel, `Fragmented MP4 - Found that we have seeking info for this time range: ${min} <= ${time} <= ${max}`);
  const kf = findKeyframeBeforeTime({
  samplePositions: positions.samples,
  time,
- timescale: firstTrack.timescale,
+ timescale: firstTrack.originalTimescale,
  logLevel,
  mediaSections: info.mediaSections,
  startInSeconds: firstTrack.startInSeconds
@@ -8390,7 +8409,7 @@ var getSeekingByteFromFragmentedMp4 = async ({
  mfra: atom,
  time,
  firstTrack,
- timescale: firstTrack.timescale
+ timescale: firstTrack.originalTimescale
  });
  if (moofOffset !== null && !(moofOffset.start <= currentPosition && currentPosition < moofOffset.end)) {
  Log.verbose(logLevel, `Fragmented MP4 - Found based on mfra information that we should seek to: ${moofOffset.start} ${moofOffset.end}`);
@@ -8474,7 +8493,7 @@ var getSeekingByteFromIsoBaseMedia = ({
  const keyframe = findKeyframeBeforeTime({
  samplePositions,
  time,
- timescale: track.timescale,
+ timescale: track.originalTimescale,
  logLevel,
  mediaSections: info.mediaSections,
  startInSeconds: track.startInSeconds
@@ -8952,7 +8971,6 @@ var getSeekingByteFromMatroska = async ({
  };

  // src/convert-audio-or-video-sample.ts
- var TARGET_TIMESCALE = 1e6;
  var fixFloat = (value) => {
  if (value % 1 < 0.0000001) {
  return Math.floor(value);
@@ -8966,20 +8984,17 @@ var convertAudioOrVideoSampleToWebCodecsTimestamps = ({
  sample,
  timescale
  }) => {
- if (timescale === TARGET_TIMESCALE) {
+ if (timescale === WEBCODECS_TIMESCALE) {
  return sample;
  }
- const { cts, dts, timestamp } = sample;
+ const { decodingTimestamp: dts, timestamp } = sample;
  return {
- cts: fixFloat(cts * (TARGET_TIMESCALE / timescale)),
- dts: fixFloat(dts * (TARGET_TIMESCALE / timescale)),
- timestamp: fixFloat(timestamp * (TARGET_TIMESCALE / timescale)),
- duration: sample.duration === undefined ? undefined : fixFloat(sample.duration * (TARGET_TIMESCALE / timescale)),
+ decodingTimestamp: fixFloat(dts * (WEBCODECS_TIMESCALE / timescale)),
+ timestamp: fixFloat(timestamp * (WEBCODECS_TIMESCALE / timescale)),
+ duration: sample.duration === undefined ? undefined : fixFloat(sample.duration * (WEBCODECS_TIMESCALE / timescale)),
  data: sample.data,
- trackId: sample.trackId,
  type: sample.type,
  offset: sample.offset,
- timescale: TARGET_TIMESCALE,
  ..."avc" in sample ? { avc: sample.avc } : {}
  };
  };
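`convertAudioOrVideoSampleToWebCodecsTimestamps` keeps the same arithmetic, now against the shared constant: multiply each timestamp by `WEBCODECS_TIMESCALE / timescale` and let `fixFloat` snap near-integers. A worked example at the MPEG-TS timescale of 90000:

```ts
const WEBCODECS_TIMESCALE = 1e6;
const MPEG_TIMESCALE = 90000;

// 180000 ticks at 90 kHz = 2 seconds = 2,000,000 microseconds.
const decodingTimestamp = 180000 * (WEBCODECS_TIMESCALE / MPEG_TIMESCALE); // 2000000
```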
@@ -9069,7 +9084,7 @@ var handleAvcPacket = async ({
  rotation: 0,
  trackId: programId,
  type: "video",
- timescale: MPEG_TIMESCALE,
+ originalTimescale: MPEG_TIMESCALE,
  codec: getCodecStringFromSpsAndPps(spsAndPps.sps),
  codecData: { type: "avc-sps-pps", data: codecPrivate2 },
  fps: null,
@@ -9087,7 +9102,8 @@ var handleAvcPacket = async ({
  },
  colorSpace: mediaParserAdvancedColorToWebCodecsColor(advancedColor),
  advancedColor,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  await registerVideoTrack({
  track,
@@ -9100,15 +9116,12 @@ var handleAvcPacket = async ({
  }
  const type = getKeyFrameOrDeltaFromAvcInfo(avc);
  const sample = {
- cts: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
- dts: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
+ decodingTimestamp: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
  timestamp: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
  duration: undefined,
  data: streamBuffer.getBuffer(),
- trackId: programId,
  type: type === "bidirectional" ? "delta" : type,
- offset,
- timescale: MPEG_TIMESCALE
+ offset
  };
  if (type === "key") {
  transportStream.observedPesHeaders.markPtsAsKeyframe(streamBuffer.pesHeader.pts);
@@ -9117,7 +9130,10 @@ var handleAvcPacket = async ({
  sample,
  timescale: MPEG_TIMESCALE
  });
- await sampleCallbacks.onVideoSample(programId, videoSample);
+ await sampleCallbacks.onVideoSample({
+ videoSample,
+ trackId: programId
+ });
  transportStream.lastEmittedSample.setLastEmittedSample(sample);
  };

@@ -9465,7 +9481,7 @@ var getSeekingHintsFromMatroska = (tracksState, keyframesState, webmState) => {
  return {
  type: "webm-seeking-hints",
  track: firstVideoTrack ? {
- timescale: firstVideoTrack.timescale,
+ timescale: firstVideoTrack.originalTimescale,
  trackId: firstVideoTrack.trackId
  } : null,
  keyframes,
@@ -10594,10 +10610,11 @@ var parseAac = async (state) => {
  description: codecPrivate2,
  numberOfChannels: channelConfiguration,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: WEBCODECS_TIMESCALE,
  trackId: 0,
  type: "audio",
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
  tracks: state.callbacks.tracks,
@@ -10620,15 +10637,15 @@ var parseAac = async (state) => {
  type: "key",
  data,
  offset: startOffset,
- timescale: 1e6,
- trackId: 0,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  timestamp
  },
  timescale: 1
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  return Promise.resolve(null);
  };

@@ -10796,7 +10813,7 @@ var emitSample = async ({
  data,
  offset
  }) => {
- const iterator = getArrayBufferIterator(data, null);
+ const iterator = getArrayBufferIterator(data, data.length);
  const parsed = parseFrameHeader({ iterator, state });
  if (!parsed) {
  throw new Error("Invalid CRC");
@@ -10821,17 +10838,17 @@ var emitSample = async ({
  sample: {
  data,
  duration: duration2,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  timestamp,
  type: "key",
- offset,
- timescale: 1,
- trackId: 0
+ offset
  },
  timescale: 1
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  iterator.destroy();
  };
  var parseFlacFrame = async ({
@@ -10987,9 +11004,10 @@ var parseStreamInfo = async ({
  codecEnum: "flac",
  numberOfChannels: channels2,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: WEBCODECS_TIMESCALE,
  trackId: 0,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
  tracks: state.callbacks.tracks,
@@ -11406,7 +11424,7 @@ var getMoovAtom = async ({
  // src/containers/iso-base-media/mdat/calculate-jump-marks.ts
  var MAX_SPREAD_IN_SECONDS = 8;
  var getKey = (samplePositionTrack) => {
- return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.dts}`;
+ return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}`;
  };
  var findBestJump = ({
  allSamplesSortedByOffset,
@@ -11499,7 +11517,7 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
  addFinalJumpIfNecessary();
  break;
  }
- const timestamp = currentSamplePosition.samplePosition.dts / currentSamplePosition.track.timescale;
+ const timestamp = currentSamplePosition.samplePosition.decodingTimestamp / currentSamplePosition.track.originalTimescale;
  progresses[currentSamplePosition.track.trackId] = timestamp;
  const progressValues = Object.values(progresses);
  const maxProgress = Math.max(...progressValues);
@@ -11618,17 +11636,17 @@ var parseMdatSection = async (state) => {
  return makeFetchMoreData(samplesWithIndex.samplePosition.size - iterator.bytesRemaining());
  }
  const {
- cts: rawCts,
- dts: rawDts,
+ timestamp: rawCts,
+ decodingTimestamp: rawDts,
  duration: duration2,
  isKeyframe,
  offset,
  bigEndian,
  chunkSize
  } = samplesWithIndex.samplePosition;
- const { timescale, startInSeconds } = samplesWithIndex.track;
- const cts = rawCts + startInSeconds * timescale;
- const dts = rawDts + startInSeconds * timescale;
+ const { originalTimescale, startInSeconds } = samplesWithIndex.track;
+ const cts = rawCts + startInSeconds * originalTimescale;
+ const dts = rawDts + startInSeconds * originalTimescale;
  const bytes = postprocessBytes({
  bytes: iterator.getSlice(samplesWithIndex.samplePosition.size),
  bigEndian,
@@ -11640,16 +11658,16 @@ var parseMdatSection = async (state) => {
  data: bytes,
  timestamp: cts,
  duration: duration2,
- cts,
- dts,
- trackId: samplesWithIndex.track.trackId,
+ decodingTimestamp: dts,
  type: isKeyframe ? "key" : "delta",
- offset,
- timescale
+ offset
  },
- timescale
+ timescale: originalTimescale
+ });
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: samplesWithIndex.track.trackId
  });
- await state.callbacks.onAudioSample(samplesWithIndex.track.trackId, audioSample);
  }
  if (samplesWithIndex.track.type === "video") {
  const nalUnitType = bytes[4] & 31;
@@ -11663,16 +11681,16 @@ var parseMdatSection = async (state) => {
  data: bytes,
  timestamp: cts,
  duration: duration2,
- cts,
- dts,
- trackId: samplesWithIndex.track.trackId,
+ decodingTimestamp: dts,
  type: isKeyframe && !isRecoveryPoint ? "key" : "delta",
- offset,
- timescale
+ offset
  },
- timescale
+ timescale: originalTimescale
+ });
+ await state.callbacks.onVideoSample({
+ videoSample,
+ trackId: samplesWithIndex.track.trackId
  });
- await state.callbacks.onVideoSample(samplesWithIndex.track.trackId, videoSample);
  }
  const jump = jumpMarks.find((j) => j.afterSampleWithOffset === offset);
  if (jump) {
@@ -12429,6 +12447,9 @@ var webReader = {
  };
  // src/parse-media.ts
  var parseMedia = (options) => {
+ if (!options) {
+ return Promise.reject(new Error("No options provided. See https://www.remotion.dev/media-parser for how to get started."));
+ }
  return internalParseMedia({
  fields: options.fields ?? null,
  logLevel: options.logLevel ?? "info",
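`parseMedia()` called with no arguments now rejects with a descriptive error instead of throwing a `TypeError` on `options.fields`. A usage sketch of the new failure mode:

```ts
import {parseMedia} from '@remotion/media-parser';

// @ts-expect-error - deliberately omitting the options object
parseMedia().catch((err) => {
  // "No options provided. See https://www.remotion.dev/media-parser for how to get started."
  console.error(err.message);
});
```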
@@ -12496,7 +12517,7 @@ var considerSeekBasedOnChunk = async ({
  await callback(sample);
  return;
  }
- const timestamp = Math.min(sample.dts / sample.timescale, sample.cts / sample.timescale);
+ const timestamp = Math.min(sample.decodingTimestamp / WEBCODECS_TIMESCALE, sample.timestamp / WEBCODECS_TIMESCALE);
  if (timestamp > pendingSeek.targetTime && chunkIndex !== null && chunkIndex > 0) {
  m3uState.setNextSeekShouldSubtractChunks(playlistUrl, subtractChunks + 1);
  parentController.seek(pendingSeek.targetTime);
@@ -13230,17 +13251,14 @@ var getAudioSampleFromCbr = ({
  const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
  const durationInSeconds = samplesPerFrame / sampleRate;
  const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
- const timestamp = Math.round(timeInSeconds * 1e6);
- const duration2 = Math.round(durationInSeconds * 1e6);
+ const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  duration: duration2,
  offset: initialOffset,
- timescale: 1e6,
  timestamp,
- trackId: 0,
  type: "key"
  };
  return { audioSample, timeInSeconds, durationInSeconds };
@@ -13277,17 +13295,14 @@ var getAudioSampleFromVbr = ({
  tableOfContents: info.xingData.tableOfContents
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
- const timestamp = Math.round(timeInSeconds * 1e6);
- const duration2 = Math.round(durationInSeconds * 1e6);
+ const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  duration: duration2,
  offset: position,
- timescale: 1e6,
  timestamp,
- trackId: 0,
  type: "key"
  };
  return { timeInSeconds, audioSample, durationInSeconds };
@@ -13360,9 +13375,10 @@ var parseMpegHeader = async ({
  description: undefined,
  numberOfChannels,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: 1e6,
  trackId: 0,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
  tracks: state.callbacks.tracks,
@@ -13399,7 +13415,10 @@ var parseMpegHeader = async ({
  offset: initialOffset,
  durationInSeconds
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  };

  // src/containers/mp3/seek/wait-until-syncword.ts
@@ -13475,11 +13494,12 @@ var getStrhForIndex = (structure, trackId) => {
  // src/containers/riff/convert-queued-sample-to-mediaparser-sample.ts
  var getKeyFrameOffsetAndPocs = ({
  state,
- sample
+ sample,
+ trackId
  }) => {
  if (sample.type === "key") {
  const sampleOffset = state.riff.sampleCounter.getSampleCountForTrack({
- trackId: sample.trackId
+ trackId
  });
  return {
  sampleOffsetAtKeyframe: sampleOffset,
@@ -13491,7 +13511,7 @@ var getKeyFrameOffsetAndPocs = ({
  if (!keyframeAtOffset) {
  throw new Error("no keyframe at offset");
  }
- const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[sample.trackId];
+ const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[trackId];
  const pocsAtKeyframeOffset = state.riff.sampleCounter.getPocAtKeyframeOffset({
  keyframeOffset: keyframeAtOffset.positionInBytes
  });
@@ -13500,12 +13520,17 @@ var getKeyFrameOffsetAndPocs = ({
  pocsAtKeyframeOffset
  };
  };
- var convertQueuedSampleToMediaParserSample = (sample, state) => {
- const strh = getStrhForIndex(state.structure.getRiffStructure(), sample.trackId);
+ var convertQueuedSampleToMediaParserSample = ({
+ sample,
+ state,
+ trackId
+ }) => {
+ const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
  const samplesPerSecond = strh.rate / strh.scale;
  const { sampleOffsetAtKeyframe, pocsAtKeyframeOffset } = getKeyFrameOffsetAndPocs({
  sample,
- state
+ state,
+ trackId
  });
  const indexOfPoc = pocsAtKeyframeOffset.findIndex((poc) => poc === sample.avc?.poc);
  if (indexOfPoc === -1) {
@@ -13517,8 +13542,7 @@ var convertQueuedSampleToMediaParserSample = (sample, state) => {
  sample: {
  ...sample,
  timestamp,
- cts: timestamp,
- dts: timestamp
+ decodingTimestamp: timestamp
  },
  timescale: 1
  });
@@ -13928,10 +13952,8 @@ var handleChunk = async ({
  const rawSample = {
  data,
  duration: 1 / samplesPerSecond,
- trackId,
  type: keyOrDelta === "bidirectional" ? "delta" : keyOrDelta,
  offset,
- timescale: samplesPerSecond,
  avc: info
  };
  const maxFramesInBuffer = state.avc.getMaxFramesInBuffer();
@@ -13948,14 +13970,29 @@ var handleChunk = async ({
  poc: info.poc
  });
  }
- state.riff.queuedBFrames.addFrame(rawSample, maxFramesInBuffer);
+ state.riff.queuedBFrames.addFrame({
+ frame: rawSample,
+ trackId,
+ maxFramesInBuffer,
+ timescale: samplesPerSecond
+ });
  const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
  if (!releasedFrame) {
  return;
  }
- const videoSample = convertQueuedSampleToMediaParserSample(releasedFrame, state);
- state.riff.sampleCounter.onVideoSample(videoSample);
- await state.callbacks.onVideoSample(trackId, videoSample);
+ const videoSample = convertQueuedSampleToMediaParserSample({
+ sample: releasedFrame.sample,
+ state,
+ trackId: releasedFrame.trackId
+ });
+ state.riff.sampleCounter.onVideoSample({
+ trackId,
+ videoSample
+ });
+ await state.callbacks.onVideoSample({
+ videoSample,
+ trackId
+ });
  }
  const audioChunk = ckId.match(/^([0-9]{2})wb$/);
  if (audioChunk) {
@@ -13970,24 +14007,21 @@ var handleChunk = async ({
  trackId
  });
  const timeInSec = nthSample / samplesPerSecond;
- const timestamp = timeInSec;
+ const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE);
  const data = iterator.getSlice(ckSize);
- const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
- sample: {
- cts: timestamp,
- dts: timestamp,
- data,
- duration: undefined,
- timestamp,
- trackId,
- type: "key",
- offset,
- timescale: samplesPerSecond
- },
- timescale: 1
- });
+ const audioSample = {
+ decodingTimestamp: timestamp,
+ data,
+ duration: undefined,
+ timestamp,
+ type: "key",
+ offset
+ };
  state.riff.sampleCounter.onAudioSample(trackId, audioSample);
- await state.callbacks.onAudioSample(trackId, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId
+ });
  }
  };
  var parseMovi = async ({
@@ -14030,9 +14064,19 @@ var parseMediaSection = async (state) => {
  var parseRiffBody = async (state) => {
  const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
  if (releasedFrame) {
- const converted = convertQueuedSampleToMediaParserSample(releasedFrame, state);
- state.riff.sampleCounter.onVideoSample(converted);
- await state.callbacks.onVideoSample(releasedFrame.trackId, converted);
+ const converted = convertQueuedSampleToMediaParserSample({
+ sample: releasedFrame.sample,
+ state,
+ trackId: releasedFrame.trackId
+ });
+ state.riff.sampleCounter.onVideoSample({
+ trackId: releasedFrame.trackId,
+ videoSample: converted
+ });
+ await state.callbacks.onVideoSample({
+ videoSample: converted,
+ trackId: releasedFrame.trackId
+ });
  return null;
  }
  if (state.mediaSection.isCurrentByteInMediaSection(state.iterator) === "in-section") {
@@ -14410,13 +14454,14 @@ var handleAacPacket = async ({
  type: "audio",
  codecData: { type: "aac-config", data: codecPrivate2 },
  trackId: programId,
- timescale: MPEG_TIMESCALE,
+ originalTimescale: MPEG_TIMESCALE,
  codecEnum: "aac",
  codec: mapAudioObjectTypeToCodecString(audioObjectType),
  description: codecPrivate2,
  numberOfChannels: channelConfiguration,
  sampleRate,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  await registerAudioTrack({
  track,
@@ -14428,21 +14473,21 @@ var handleAacPacket = async ({
  });
  }
  const sample = {
- cts: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
- dts: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
+ decodingTimestamp: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
  timestamp: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
  duration: undefined,
  data: streamBuffer.getBuffer(),
- trackId: programId,
  type: "key",
- offset,
- timescale: MPEG_TIMESCALE
+ offset
  };
  const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
  sample,
  timescale: MPEG_TIMESCALE
  });
- await sampleCallbacks.onAudioSample(programId, audioSample);
+ await sampleCallbacks.onAudioSample({
+ audioSample,
+ trackId: programId
+ });
  transportStream.lastEmittedSample.setLastEmittedSample(sample);
  };

@@ -14933,9 +14978,10 @@ var parseFmt = async ({
  codecEnum: format,
  numberOfChannels,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: 1e6,
  trackId: 0,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  container: "wav",
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
@@ -15035,19 +15081,19 @@ var parseMediaSection2 = async ({
  const data = iterator.getSlice(toRead);
  const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
  sample: {
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  data,
  duration: duration2,
  timestamp,
- trackId: 0,
  type: "key",
- offset,
- timescale: 1e6
+ offset
  },
  timescale: 1
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  return null;
  };

@@ -16149,7 +16195,7 @@ var sampleSorter = ({
  if (!callback) {
  throw new Error("No callback found for audio sample");
  }
- latestSample[src] = sample.dts;
+ latestSample[src] = sample.decodingTimestamp;
  await callback(sample);
  },
  addVideoSample: async (src, sample) => {
@@ -16157,7 +16203,7 @@ var sampleSorter = ({
  if (!callback) {
  throw new Error("No callback found for video sample.");
  }
- latestSample[src] = sample.dts;
+ latestSample[src] = sample.decodingTimestamp;
  await callback(sample);
  },
  getNextStreamToRun: (streams) => {
@@ -16652,7 +16698,10 @@ var fetchIdx1 = async ({
  logLevel,
  prefetchCache
  });
- const iterator = getArrayBufferIterator(new Uint8Array, Infinity);
+ if (result.contentLength === null) {
+ throw new Error("Content length is null");
+ }
+ const iterator = getArrayBufferIterator(new Uint8Array, result.contentLength - position + 1);
  while (true) {
  const res = await result.reader.reader.read();
  if (res.value) {
@@ -16752,13 +16801,18 @@ var queuedBFramesState = () => {
  queuedFrames.length = 0;
  };
  return {
- addFrame: (frame, maxFramesInBuffer) => {
+ addFrame: ({
+ frame,
+ maxFramesInBuffer,
+ trackId,
+ timescale
+ }) => {
  if (frame.type === "key") {
  flush();
- releasedFrames.push(frame);
+ releasedFrames.push({ sample: frame, trackId, timescale });
  return;
  }
- queuedFrames.push(frame);
+ queuedFrames.push({ sample: frame, trackId, timescale });
  if (queuedFrames.length > maxFramesInBuffer) {
  releasedFrames.push(queuedFrames.shift());
  }
@@ -16819,22 +16873,25 @@ var riffSampleCounter = () => {
  }
  samplesForTrack[trackId]++;
  };
- const onVideoSample = (videoSample) => {
- if (typeof samplesForTrack[videoSample.trackId] === "undefined") {
- samplesForTrack[videoSample.trackId] = 0;
+ const onVideoSample = ({
+ trackId,
+ videoSample
+ }) => {
+ if (typeof samplesForTrack[trackId] === "undefined") {
+ samplesForTrack[trackId] = 0;
  }
  if (videoSample.type === "key") {
  riffKeys.addKeyframe({
- trackId: videoSample.trackId,
- decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
+ trackId,
+ decodingTimeInSeconds: videoSample.decodingTimestamp / WEBCODECS_TIMESCALE,
  positionInBytes: videoSample.offset,
- presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
+ presentationTimeInSeconds: videoSample.timestamp / WEBCODECS_TIMESCALE,
  sizeInBytes: videoSample.data.length,
  sampleCounts: { ...samplesForTrack }
  });
  }
  if (videoSample.data.length > 0) {
- samplesForTrack[videoSample.trackId]++;
+ samplesForTrack[trackId]++;
  }
  };
  const getSampleCountForTrack = ({ trackId }) => {
@@ -16966,7 +17023,10 @@ var callbacksState = ({
  }
  queuedVideoSamples[id] = [];
  },
- onAudioSample: async (trackId, audioSample) => {
+ onAudioSample: async ({
+ audioSample,
+ trackId
+ }) => {
  if (controller._internals.signal.aborted) {
  throw new Error("Aborted");
  }
@@ -16984,7 +17044,10 @@ var callbacksState = ({
  samplesObserved.addAudioSample(audioSample);
  }
  },
- onVideoSample: async (trackId, videoSample) => {
+ onVideoSample: async ({
+ trackId,
+ videoSample
+ }) => {
  if (controller._internals.signal.aborted) {
  throw new Error("Aborted");
  }
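Both internal sample callbacks move from positional parameters to a single options object, which is the shape every updated call site in this diff now passes:

```ts
// Before 4.0.305 (positional):
// await callbacks.onVideoSample(trackId, videoSample);
// await callbacks.onAudioSample(trackId, audioSample);

// From 4.0.305 (options object):
await callbacks.onVideoSample({videoSample, trackId});
await callbacks.onAudioSample({audioSample, trackId});
```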
@@ -17001,9 +17064,9 @@ var callbacksState = ({
  if (videoSample.type === "key") {
  keyframes.addKeyframe({
  trackId,
- decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
+ decodingTimeInSeconds: videoSample.decodingTimestamp / WEBCODECS_TIMESCALE,
  positionInBytes: videoSample.offset,
- presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
+ presentationTimeInSeconds: videoSample.timestamp / WEBCODECS_TIMESCALE,
  sizeInBytes: videoSample.data.length
  });
  }
@@ -17058,9 +17121,9 @@ var samplesObservedState = () => {
  return largestSample - smallestSample;
  };
  const addVideoSample = (videoSample) => {
- videoSamples.set(videoSample.cts, videoSample.data.byteLength);
- const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
- const duration2 = (videoSample.duration ?? 0) / videoSample.timescale;
+ videoSamples.set(videoSample.timestamp, videoSample.data.byteLength);
+ const presentationTimeInSeconds = videoSample.timestamp / WEBCODECS_TIMESCALE;
+ const duration2 = (videoSample.duration ?? 0) / WEBCODECS_TIMESCALE;
  if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
  largestVideoSample = presentationTimeInSeconds + duration2;
  }
@@ -17069,9 +17132,9 @@ var samplesObservedState = () => {
  }
  };
  const addAudioSample = (audioSample) => {
- audioSamples.set(audioSample.cts, audioSample.data.byteLength);
- const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
- const duration2 = (audioSample.duration ?? 0) / audioSample.timescale;
+ audioSamples.set(audioSample.timestamp, audioSample.data.byteLength);
+ const presentationTimeInSeconds = audioSample.timestamp / WEBCODECS_TIMESCALE;
+ const duration2 = (audioSample.duration ?? 0) / WEBCODECS_TIMESCALE;
  if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
  largestAudioSample = presentationTimeInSeconds + duration2;
  }
@@ -17439,6 +17502,9 @@ var internalParseMedia = async function({
  seekingHints,
  ...more
  }) {
+ if (!src) {
+ throw new Error('No "src" provided');
+ }
  controller._internals.markAsReadyToEmitEvents();
  warnIfRemotionLicenseNotAcknowledged({
  acknowledgeRemotionLicense,
@@ -17541,6 +17607,9 @@ var internalParseMedia = async function({
  };
  // src/download-and-parse-media.ts
  var downloadAndParseMedia = async (options) => {
+ if (!options) {
+ return Promise.reject(new Error("No options provided. See https://www.remotion.dev/media-parser for how to get started."));
+ }
  const logLevel = options.logLevel ?? "info";
  const content = await options.writer.createContent({
  filename: "hmm",
@@ -17610,7 +17679,7 @@ var downloadAndParseMedia = async (options) => {
  return returnValue;
  };
  // src/version.ts
- var VERSION = "4.0.303";
+ var VERSION = "4.0.305";

  // src/index.ts
  var MediaParserInternals = {
@@ -17636,6 +17705,7 @@ export {
  downloadAndParseMedia,
  defaultSelectM3uStreamFn,
  defaultSelectM3uAssociatedPlaylists,
+ WEBCODECS_TIMESCALE,
  VERSION,
  MediaParserInternals,
  MediaParserAbortError,