@remotion/media-parser 4.0.303 → 4.0.305

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dist/containers/aac/parse-aac.js +8 -6
  2. package/dist/containers/flac/parse-flac-frame.js +6 -6
  3. package/dist/containers/flac/parse-streaminfo.js +3 -1
  4. package/dist/containers/iso-base-media/find-keyframe-before-time.js +2 -2
  5. package/dist/containers/iso-base-media/get-keyframes.js +3 -3
  6. package/dist/containers/iso-base-media/get-sample-position-bounds.js +2 -2
  7. package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +3 -3
  8. package/dist/containers/iso-base-media/get-seeking-byte.js +1 -1
  9. package/dist/containers/iso-base-media/make-track.js +7 -3
  10. package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +3 -3
  11. package/dist/containers/iso-base-media/mdat/mdat.js +16 -16
  12. package/dist/containers/iso-base-media/parse-icc-profile.js +2 -2
  13. package/dist/containers/m3u/first-sample-in-m3u-chunk.js +2 -1
  14. package/dist/containers/m3u/sample-sorter.js +2 -2
  15. package/dist/containers/mp3/parse-mpeg-header.js +7 -2
  16. package/dist/containers/mp3/seek/audio-sample-from-cbr.js +4 -6
  17. package/dist/containers/mp3/seek/audio-sample-from-vbr.js +4 -6
  18. package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +6 -5
  19. package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +7 -7
  20. package/dist/containers/riff/get-tracks-from-avi.js +5 -2
  21. package/dist/containers/riff/parse-movi.js +35 -25
  22. package/dist/containers/riff/parse-riff-body.js +13 -3
  23. package/dist/containers/riff/seek/fetch-idx1.js +4 -1
  24. package/dist/containers/transport-stream/handle-aac-packet.js +8 -7
  25. package/dist/containers/transport-stream/handle-avc-packet.js +8 -7
  26. package/dist/containers/wav/parse-fmt.js +3 -1
  27. package/dist/containers/wav/parse-media-section.js +5 -5
  28. package/dist/containers/webm/get-sample-from-block.d.ts +6 -0
  29. package/dist/containers/webm/get-sample-from-block.js +9 -8
  30. package/dist/containers/webm/make-track.js +5 -2
  31. package/dist/containers/webm/parse-ebml.js +12 -3
  32. package/dist/containers/webm/seek/seeking-hints.js +1 -1
  33. package/dist/convert-audio-or-video-sample.js +6 -9
  34. package/dist/download-and-parse-media.js +3 -0
  35. package/dist/esm/index.mjs +266 -196
  36. package/dist/esm/worker-server-entry.mjs +262 -196
  37. package/dist/esm/worker-web-entry.mjs +262 -196
  38. package/dist/get-duration.js +2 -2
  39. package/dist/get-sample-positions-from-mp4.js +2 -2
  40. package/dist/get-sample-positions.d.ts +2 -2
  41. package/dist/get-sample-positions.js +2 -2
  42. package/dist/get-tracks.d.ts +6 -3
  43. package/dist/index.d.ts +24 -5
  44. package/dist/index.js +3 -1
  45. package/dist/internal-parse-media.js +3 -0
  46. package/dist/iterator/buffer-manager.js +1 -1
  47. package/dist/parse-media.js +3 -0
  48. package/dist/samples-from-moof.js +2 -2
  49. package/dist/state/iso-base-media/cached-sample-positions.d.ts +2 -2
  50. package/dist/state/parser-state.d.ts +23 -5
  51. package/dist/state/riff/queued-frames.d.ts +14 -3
  52. package/dist/state/riff/queued-frames.js +3 -3
  53. package/dist/state/riff/sample-counter.d.ts +4 -1
  54. package/dist/state/riff/sample-counter.js +8 -7
  55. package/dist/state/riff.d.ts +15 -3
  56. package/dist/state/sample-callbacks.d.ts +8 -2
  57. package/dist/state/sample-callbacks.js +5 -4
  58. package/dist/state/samples-observed/slow-duration-fps.js +7 -6
  59. package/dist/version.d.ts +1 -1
  60. package/dist/version.js +1 -1
  61. package/dist/webcodec-sample-types.d.ts +3 -9
  62. package/dist/webcodecs-timescale.d.ts +1 -0
  63. package/dist/webcodecs-timescale.js +4 -0
  64. package/dist/worker-server.js +2 -1
  65. package/package.json +3 -3
@@ -2328,7 +2328,7 @@ var bufferManager = ({
  counter
  }) => {
  const buf = new ArrayBuffer(initialData.byteLength, {
- maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 32)
+ maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 31)
  });
  if (!buf.resize) {
  throw new Error("`ArrayBuffer.resize` is not supported in this Runtime. On the server: Use at least Node.js 20 or Bun. In the browser: Chrome 111, Edge 111, Safari 16.4, Firefox 128, Opera 111");
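Note: the only change here is the cap on `maxByteLength` dropping from 2 ** 32 to 2 ** 31, presumably because resizable ArrayBuffers reserve up to `maxByteLength` and some engines reject or fail to reserve values at the 2 ** 32 boundary; 2 ** 31 stays within 32-bit length arithmetic. A minimal sketch of the pattern, with illustrative names (not the library's code):

    // Sketch: growable buffer capped at 2 ** 31 bytes. SAFE_MAX is an
    // assumption for illustration; an engine may still clamp lower.
    const SAFE_MAX = 2 ** 31;

    const makeResizable = (initialData: Uint8Array, maxBytes: number | null): ArrayBuffer => {
      const buf = new ArrayBuffer(initialData.byteLength, {
        maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, SAFE_MAX),
      });
      new Uint8Array(buf).set(initialData); // copy the seed bytes in
      return buf;
    };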
@@ -2999,6 +2999,9 @@ var getVideoCodecString = (trakBox) => {
  return videoSample.format;
  };

+ // src/webcodecs-timescale.ts
+ var WEBCODECS_TIMESCALE = 1e6;
+
  // src/containers/iso-base-media/color-to-webcodecs-colors.ts
  var mediaParserAdvancedColorToWebCodecsColor = (color2) => {
  return {
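Note: the new `WEBCODECS_TIMESCALE` constant encodes the fact that WebCodecs (`EncodedVideoChunk`/`EncodedAudioChunk`) expresses timestamps and durations in microseconds, i.e. 1e6 units per second. A small illustration of the unit conversion (helper name is hypothetical):

    const WEBCODECS_TIMESCALE = 1e6; // microseconds per second

    // Convert a value from a container clock into WebCodecs units.
    const toWebCodecsUnits = (value: number, originalTimescale: number): number =>
      value * (WEBCODECS_TIMESCALE / originalTimescale);

    toWebCodecsUnits(45000, 90000); // 90 kHz MPEG-TS clock -> 500000 µs = 0.5 s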
@@ -3268,23 +3271,25 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
  return {
  type: "audio",
  trackId: tkhdBox.trackId,
- timescale: timescaleAndDuration.timescale,
+ originalTimescale: timescaleAndDuration.timescale,
  codec: codecString,
  numberOfChannels: actual.numberOfChannels,
  sampleRate: actual.sampleRate,
  description: actual.codecPrivate?.data ?? undefined,
  codecData: actual.codecPrivate,
  codecEnum,
- startInSeconds: startTimeInSeconds
+ startInSeconds: startTimeInSeconds,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  if (!trakBoxContainsVideo(trakBox)) {
  return {
  type: "other",
  trackId: tkhdBox.trackId,
- timescale: timescaleAndDuration.timescale,
+ originalTimescale: timescaleAndDuration.timescale,
  trakBox,
- startInSeconds: startTimeInSeconds
+ startInSeconds: startTimeInSeconds,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  const videoSample = getStsdVideoConfig(trakBox);
@@ -3317,7 +3322,7 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
  type: "video",
  trackId: tkhdBox.trackId,
  description: videoDescriptors ?? undefined,
- timescale: timescaleAndDuration.timescale,
+ originalTimescale: timescaleAndDuration.timescale,
  codec,
  sampleAspectRatio: getSampleAspectRatio(trakBox),
  width,
@@ -3332,7 +3337,8 @@ var makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
  advancedColor,
  codecEnum: getVideoCodecFromIsoTrak(trakBox),
  fps: getFpsFromMp4TrakBox(trakBox),
- startInSeconds: startTimeInSeconds
+ startInSeconds: startTimeInSeconds,
+ timescale: WEBCODECS_TIMESCALE
  };
  return track;
  };
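Note: throughout this release, tracks keep the container's clock under the renamed `originalTimescale` field, while `timescale` is repurposed to always hold `WEBCODECS_TIMESCALE`, matching the units of the samples that get emitted. A sketch of consuming the new shape (type is an illustrative subset, not the published one):

    type TrackClocks = {
      originalTimescale: number; // the container's clock, e.g. the mdhd timescale
      timescale: number;         // now always 1e6 (WebCodecs microseconds)
      startInSeconds: number;
    };

    // Container-native units still need originalTimescale:
    const containerUnitsToSeconds = (units: number, track: TrackClocks): number =>
      units / track.originalTimescale;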
@@ -3455,9 +3461,10 @@ var makeAviAudioTrack = ({
  description: new Uint8Array([18, 16]),
  numberOfChannels: strf.numberOfChannels,
  sampleRate: strf.sampleRate,
- timescale: MEDIA_PARSER_RIFF_TIMESCALE,
+ originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
  trackId: index,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  };
  var makeAviVideoTrack = ({
@@ -3478,7 +3485,7 @@ var makeAviVideoTrack = ({
  height: strf.height,
  type: "video",
  displayAspectHeight: strf.height,
- timescale: MEDIA_PARSER_RIFF_TIMESCALE,
+ originalTimescale: MEDIA_PARSER_RIFF_TIMESCALE,
  description: undefined,
  m3uStreamFormat: null,
  trackId: index,
@@ -3501,7 +3508,8 @@
  denominator: 1
  },
  fps: strh.rate / strh.scale,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  };
  var getTracksFromAvi = (structure, state) => {
@@ -4144,7 +4152,7 @@ var getTrack = ({
  numerator: 1,
  denominator: 1
  },
- timescale,
+ originalTimescale: timescale,
  codedHeight: height.value.value,
  codedWidth: width.value.value,
  displayAspectHeight: displayHeight2 ? displayHeight2.value.value : height.value.value,
@@ -4155,7 +4163,8 @@
  advancedColor,
  codecEnum,
  fps: null,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  if (trackTypeToString(trackType2.value.value) === "audio") {
@@ -4170,7 +4179,7 @@
  type: "audio",
  trackId,
  codec: codecString,
- timescale,
+ originalTimescale: timescale,
  numberOfChannels,
  sampleRate,
  description: getAudioDescription(track),
@@ -4178,7 +4187,8 @@
  codecEnum: getMatroskaAudioCodecEnum({
  track
  }),
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  return null;
@@ -4738,8 +4748,8 @@ var getSamplesFromTraf = (trafSegment, moofOffset) => {
  const dts = time + (tfdtBox?.baseMediaDecodeTime ?? 0);
  const samplePosition = {
  offset: offset + (moofOffset ?? 0) + (dataOffset ?? 0),
- dts,
- cts: dts + (sample.sampleCompositionTimeOffset ?? 0),
+ decodingTimestamp: dts,
+ timestamp: dts + (sample.sampleCompositionTimeOffset ?? 0),
  duration: duration2,
  isKeyframe: keyframe,
  size,
@@ -4830,8 +4840,8 @@ var getSamplePositions = ({
  offset: Number(chunks[i]) + offsetInThisChunk,
  size,
  isKeyframe,
- dts,
- cts,
+ decodingTimestamp: dts,
+ timestamp: cts,
  duration: delta,
  chunk: i,
  bigEndian: false,
@@ -4880,8 +4890,8 @@ var getGroupedSamplesPositionsFromMp4 = ({
  }
  samples.push({
  chunk,
- cts: timestamp,
- dts: timestamp,
+ timestamp,
+ decodingTimestamp: timestamp,
  offset: Number(entry),
  size: stszBox.sampleSize * samplesPerChunk,
  duration: samplesPerChunk,
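Note: the recurring rename in these hunks maps MP4 terminology onto self-describing names: `cts` (composition time, when a frame is presented) becomes `timestamp`, and `dts` (decode time, when it must be handed to the decoder) becomes `decodingTimestamp`. The two only differ for streams with B-frames. An illustrative shape (field list abridged):

    type SamplePosition = {
      offset: number;
      size: number;
      isKeyframe: boolean;
      timestamp: number;         // formerly cts: presentation order
      decodingTimestamp: number; // formerly dts: decode order
      duration: number;
    };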
@@ -5241,7 +5251,7 @@ var getDurationFromIsoBaseMedia = (parserState) => {
  }
  const tracks2 = getTracks(parserState, true);
  const allSamples = tracks2.map((t) => {
- const { timescale: ts } = t;
+ const { originalTimescale: ts } = t;
  const trakBox = getTrakBoxByTrackId(moovBox, t.trackId);
  if (!trakBox) {
  return null;
@@ -5257,7 +5267,7 @@ var getDurationFromIsoBaseMedia = (parserState) => {
  if (samplePositions.length === 0) {
  return null;
  }
- const highest = samplePositions?.map((sp) => (sp.cts + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
+ const highest = samplePositions?.map((sp) => (sp.timestamp + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
  return highest ?? 0;
  });
  if (allSamples.every((s) => s === null)) {
@@ -5344,7 +5354,7 @@ var getKeyframesFromIsoBaseMedia = (state) => {
  return [];
  }
  const allSamples = videoTracks.map((t) => {
- const { timescale: ts } = t;
+ const { originalTimescale: ts } = t;
  const trakBox = getTrakBoxByTrackId(moov, t.trackId);
  if (!trakBox) {
  return [];
@@ -5365,8 +5375,8 @@
  }).map((k) => {
  return {
  trackId: t.trackId,
- presentationTimeInSeconds: k.cts / ts,
- decodingTimeInSeconds: k.dts / ts,
+ presentationTimeInSeconds: k.timestamp / ts,
+ decodingTimeInSeconds: k.decodingTimestamp / ts,
  positionInBytes: k.offset,
  sizeInBytes: k.size
  };
@@ -5775,8 +5785,8 @@ var findKeyframeBeforeTime = ({
  let videoByte = 0;
  let videoSample = null;
  for (const sample of samplePositions) {
- const ctsInSeconds = sample.cts / timescale + startInSeconds;
- const dtsInSeconds = sample.dts / timescale + startInSeconds;
+ const ctsInSeconds = sample.timestamp / timescale + startInSeconds;
+ const dtsInSeconds = sample.decodingTimestamp / timescale + startInSeconds;
  if (!sample.isKeyframe) {
  continue;
  }
@@ -5928,8 +5938,8 @@ var getSamplePositionBounds = (samplePositions, timescale) => {
  let min = Infinity;
  let max = -Infinity;
  for (const samplePosition of samplePositions) {
- const timestampMin = Math.min(samplePosition.cts, samplePosition.dts);
- const timestampMax = Math.max(samplePosition.cts, samplePosition.dts) + (samplePosition.duration ?? 0);
+ const timestampMin = Math.min(samplePosition.timestamp, samplePosition.decodingTimestamp);
+ const timestampMax = Math.max(samplePosition.timestamp, samplePosition.decodingTimestamp) + (samplePosition.duration ?? 0);
  if (timestampMin < min) {
  min = timestampMin;
  }
@@ -6013,13 +6023,13 @@ var getSeekingByteFromFragmentedMp4 = async ({
  });
  Log.trace(logLevel, "Fragmented MP4 - Checking if we have seeking info for this time range");
  for (const positions of samplePositionsArray) {
- const { min, max } = getSamplePositionBounds(positions.samples, firstTrack.timescale);
+ const { min, max } = getSamplePositionBounds(positions.samples, firstTrack.originalTimescale);
  if (min <= time && (positions.isLastFragment || isLastChunkInPlaylist || time <= max)) {
  Log.trace(logLevel, `Fragmented MP4 - Found that we have seeking info for this time range: ${min} <= ${time} <= ${max}`);
  const kf = findKeyframeBeforeTime({
  samplePositions: positions.samples,
  time,
- timescale: firstTrack.timescale,
+ timescale: firstTrack.originalTimescale,
  logLevel,
  mediaSections: info.mediaSections,
  startInSeconds: firstTrack.startInSeconds
@@ -6038,7 +6048,7 @@
  mfra: atom,
  time,
  firstTrack,
- timescale: firstTrack.timescale
+ timescale: firstTrack.originalTimescale
  });
  if (moofOffset !== null && !(moofOffset.start <= currentPosition && currentPosition < moofOffset.end)) {
  Log.verbose(logLevel, `Fragmented MP4 - Found based on mfra information that we should seek to: ${moofOffset.start} ${moofOffset.end}`);
@@ -6122,7 +6132,7 @@ var getSeekingByteFromIsoBaseMedia = ({
  const keyframe = findKeyframeBeforeTime({
  samplePositions,
  time,
- timescale: track.timescale,
+ timescale: track.originalTimescale,
  logLevel,
  mediaSections: info.mediaSections,
  startInSeconds: track.startInSeconds
@@ -6600,7 +6610,6 @@ var getSeekingByteFromMatroska = async ({
  };

  // src/convert-audio-or-video-sample.ts
- var TARGET_TIMESCALE = 1e6;
  var fixFloat = (value) => {
  if (value % 1 < 0.0000001) {
  return Math.floor(value);
@@ -6614,20 +6623,17 @@ var convertAudioOrVideoSampleToWebCodecsTimestamps = ({
  sample,
  timescale
  }) => {
- if (timescale === TARGET_TIMESCALE) {
+ if (timescale === WEBCODECS_TIMESCALE) {
  return sample;
  }
- const { cts, dts, timestamp } = sample;
+ const { decodingTimestamp: dts, timestamp } = sample;
  return {
- cts: fixFloat(cts * (TARGET_TIMESCALE / timescale)),
- dts: fixFloat(dts * (TARGET_TIMESCALE / timescale)),
- timestamp: fixFloat(timestamp * (TARGET_TIMESCALE / timescale)),
- duration: sample.duration === undefined ? undefined : fixFloat(sample.duration * (TARGET_TIMESCALE / timescale)),
+ decodingTimestamp: fixFloat(dts * (WEBCODECS_TIMESCALE / timescale)),
+ timestamp: fixFloat(timestamp * (WEBCODECS_TIMESCALE / timescale)),
+ duration: sample.duration === undefined ? undefined : fixFloat(sample.duration * (WEBCODECS_TIMESCALE / timescale)),
  data: sample.data,
- trackId: sample.trackId,
  type: sample.type,
  offset: sample.offset,
- timescale: TARGET_TIMESCALE,
  ..."avc" in sample ? { avc: sample.avc } : {}
  };
  };
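Note: with `trackId` and `timescale` dropped from the returned object, the converted sample now carries timing only in WebCodecs microseconds. The rescaling itself is a plain ratio; `fixFloat` floors away float noise such as 999999.9999999999. A worked example that mirrors the math above (standalone sketch; `fixFloat` reproduces only the branch visible in this diff):

    const WEBCODECS_TIMESCALE = 1e6;
    const fixFloat = (value: number): number =>
      value % 1 < 0.0000001 ? Math.floor(value) : value;

    const rescale = (value: number, timescale: number): number =>
      fixFloat(value * (WEBCODECS_TIMESCALE / timescale));

    rescale(90000, 90000); // 1_000_000 µs = 1 s (90 kHz clock)
    rescale(1024, 48000);  // ≈ 21333.33 µs (one AAC frame at 48 kHz)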
@@ -7088,7 +7094,7 @@ var handleAvcPacket = async ({
  rotation: 0,
  trackId: programId,
  type: "video",
- timescale: MPEG_TIMESCALE,
+ originalTimescale: MPEG_TIMESCALE,
  codec: getCodecStringFromSpsAndPps(spsAndPps.sps),
  codecData: { type: "avc-sps-pps", data: codecPrivate2 },
  fps: null,
@@ -7106,7 +7112,8 @@
  },
  colorSpace: mediaParserAdvancedColorToWebCodecsColor(advancedColor),
  advancedColor,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  await registerVideoTrack({
  track,
@@ -7119,15 +7126,12 @@
  }
  const type = getKeyFrameOrDeltaFromAvcInfo(avc);
  const sample = {
- cts: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
- dts: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
+ decodingTimestamp: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
  timestamp: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
  duration: undefined,
  data: streamBuffer.getBuffer(),
- trackId: programId,
  type: type === "bidirectional" ? "delta" : type,
- offset,
- timescale: MPEG_TIMESCALE
+ offset
  };
  if (type === "key") {
  transportStream.observedPesHeaders.markPtsAsKeyframe(streamBuffer.pesHeader.pts);
@@ -7136,7 +7140,10 @@
  sample,
  timescale: MPEG_TIMESCALE
  });
- await sampleCallbacks.onVideoSample(programId, videoSample);
+ await sampleCallbacks.onVideoSample({
+ videoSample,
+ trackId: programId
+ });
  transportStream.lastEmittedSample.setLastEmittedSample(sample);
  };

@@ -7484,7 +7491,7 @@ var getSeekingHintsFromMatroska = (tracksState, keyframesState, webmState) => {
  return {
  type: "webm-seeking-hints",
  track: firstVideoTrack ? {
- timescale: firstVideoTrack.timescale,
+ timescale: firstVideoTrack.originalTimescale,
  trackId: firstVideoTrack.trackId
  } : null,
  keyframes,
@@ -8613,10 +8620,11 @@ var parseAac = async (state) => {
  description: codecPrivate2,
  numberOfChannels: channelConfiguration,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: WEBCODECS_TIMESCALE,
  trackId: 0,
  type: "audio",
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
  tracks: state.callbacks.tracks,
@@ -8639,15 +8647,15 @@
  type: "key",
  data,
  offset: startOffset,
- timescale: 1e6,
- trackId: 0,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  timestamp
  },
  timescale: 1
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  return Promise.resolve(null);
  };

@@ -8825,7 +8833,7 @@ var emitSample = async ({
  data,
  offset
  }) => {
- const iterator = getArrayBufferIterator(data, null);
+ const iterator = getArrayBufferIterator(data, data.length);
  const parsed = parseFrameHeader({ iterator, state });
  if (!parsed) {
  throw new Error("Invalid CRC");
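Note: the second argument of `getArrayBufferIterator` bounds how large the iterator's buffer may grow, which plausibly feeds the resizable buffer's `maxByteLength` (see the `bufferManager` hunk above). Passing the actual `data.length` instead of `null`/`Infinity` avoids reserving the 2 ** 31 worst case for input whose size is already known. Sketch of the call (the declaration is illustrative, not the real signature):

    declare function getArrayBufferIterator(data: Uint8Array, maxBytes: number | null): unknown;

    const data = new Uint8Array(4096);
    // Exact bound: the iterator never needs to grow beyond the input.
    const iterator = getArrayBufferIterator(data, data.length);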
@@ -8850,17 +8858,17 @@ var emitSample = async ({
  sample: {
  data,
  duration: duration2,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  timestamp,
  type: "key",
- offset,
- timescale: 1,
- trackId: 0
+ offset
  },
  timescale: 1
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  iterator.destroy();
  };
  var parseFlacFrame = async ({
@@ -9016,9 +9024,10 @@ var parseStreamInfo = async ({
  codecEnum: "flac",
  numberOfChannels: channels2,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: WEBCODECS_TIMESCALE,
  trackId: 0,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
  tracks: state.callbacks.tracks,
@@ -9934,7 +9943,7 @@ var parseAvcc = ({

  // src/containers/iso-base-media/parse-icc-profile.ts
  var parseIccProfile = (data) => {
- const iterator = getArrayBufferIterator(data, Infinity);
+ const iterator = getArrayBufferIterator(data, data.length);
  const size = iterator.getUint32();
  if (size !== data.length) {
  throw new Error("Invalid ICC profile size");
@@ -9979,7 +9988,7 @@ var parseIccProfile = (data) => {
  for (const entry of entries) {
  const found = data.slice(entry.offset, entry.offset + entry.size);
  if (entry.tag === "rXYZ" || entry.tag === "gXYZ" || entry.tag === "bXYZ" || entry.tag === "wtpt") {
- const it = getArrayBufferIterator(found, Infinity);
+ const it = getArrayBufferIterator(found, found.length);
  it.discard(4);
  const x = it.getInt32() / 65536;
  const y = it.getInt32() / 65536;
@@ -11405,7 +11414,7 @@ var getMoovAtom = async ({
  // src/containers/iso-base-media/mdat/calculate-jump-marks.ts
  var MAX_SPREAD_IN_SECONDS = 8;
  var getKey = (samplePositionTrack) => {
- return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.dts}`;
+ return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}`;
  };
  var findBestJump = ({
  allSamplesSortedByOffset,
@@ -11498,7 +11507,7 @@ var calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
  addFinalJumpIfNecessary();
  break;
  }
- const timestamp = currentSamplePosition.samplePosition.dts / currentSamplePosition.track.timescale;
+ const timestamp = currentSamplePosition.samplePosition.decodingTimestamp / currentSamplePosition.track.originalTimescale;
  progresses[currentSamplePosition.track.trackId] = timestamp;
  const progressValues = Object.values(progresses);
  const maxProgress = Math.max(...progressValues);
@@ -11617,17 +11626,17 @@ var parseMdatSection = async (state) => {
  return makeFetchMoreData(samplesWithIndex.samplePosition.size - iterator.bytesRemaining());
  }
  const {
- cts: rawCts,
- dts: rawDts,
+ timestamp: rawCts,
+ decodingTimestamp: rawDts,
  duration: duration2,
  isKeyframe,
  offset,
  bigEndian,
  chunkSize
  } = samplesWithIndex.samplePosition;
- const { timescale, startInSeconds } = samplesWithIndex.track;
- const cts = rawCts + startInSeconds * timescale;
- const dts = rawDts + startInSeconds * timescale;
+ const { originalTimescale, startInSeconds } = samplesWithIndex.track;
+ const cts = rawCts + startInSeconds * originalTimescale;
+ const dts = rawDts + startInSeconds * originalTimescale;
  const bytes = postprocessBytes({
  bytes: iterator.getSlice(samplesWithIndex.samplePosition.size),
  bigEndian,
@@ -11639,16 +11648,16 @@
  data: bytes,
  timestamp: cts,
  duration: duration2,
- cts,
- dts,
- trackId: samplesWithIndex.track.trackId,
+ decodingTimestamp: dts,
  type: isKeyframe ? "key" : "delta",
- offset,
- timescale
+ offset
  },
- timescale
+ timescale: originalTimescale
+ });
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: samplesWithIndex.track.trackId
  });
- await state.callbacks.onAudioSample(samplesWithIndex.track.trackId, audioSample);
  }
  if (samplesWithIndex.track.type === "video") {
  const nalUnitType = bytes[4] & 31;
@@ -11662,16 +11671,16 @@
  data: bytes,
  timestamp: cts,
  duration: duration2,
- cts,
- dts,
- trackId: samplesWithIndex.track.trackId,
+ decodingTimestamp: dts,
  type: isKeyframe && !isRecoveryPoint ? "key" : "delta",
- offset,
- timescale
+ offset
  },
- timescale
+ timescale: originalTimescale
+ });
+ await state.callbacks.onVideoSample({
+ videoSample,
+ trackId: samplesWithIndex.track.trackId
  });
- await state.callbacks.onVideoSample(samplesWithIndex.track.trackId, videoSample);
  }
  const jump = jumpMarks.find((j) => j.afterSampleWithOffset === offset);
  if (jump) {
@@ -12046,6 +12055,9 @@ var forwardMediaParserControllerPauseResume = ({
  };
  // src/parse-media.ts
  var parseMedia = (options) => {
+ if (!options) {
+ return Promise.reject(new Error("No options provided. See https://www.remotion.dev/media-parser for how to get started."));
+ }
  return internalParseMedia({
  fields: options.fields ?? null,
  logLevel: options.logLevel ?? "info",
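Note: `parseMedia(undefined)` previously failed with a bare `TypeError` as soon as `options.fields` was read; the guard turns that into a rejected promise pointing at the docs. A usage sketch (field names follow the public `@remotion/media-parser` API, hedged and not exhaustive):

    import {parseMedia} from '@remotion/media-parser';

    // Calling parseMedia() with no argument now rejects with
    // "No options provided. See https://www.remotion.dev/media-parser ..."
    const {durationInSeconds} = await parseMedia({
      src: 'https://example.com/video.mp4',
      fields: {durationInSeconds: true},
      acknowledgeRemotionLicense: true,
    });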
@@ -12113,7 +12125,7 @@ var considerSeekBasedOnChunk = async ({
  await callback(sample);
  return;
  }
- const timestamp = Math.min(sample.dts / sample.timescale, sample.cts / sample.timescale);
+ const timestamp = Math.min(sample.decodingTimestamp / WEBCODECS_TIMESCALE, sample.timestamp / WEBCODECS_TIMESCALE);
  if (timestamp > pendingSeek.targetTime && chunkIndex !== null && chunkIndex > 0) {
  m3uState.setNextSeekShouldSubtractChunks(playlistUrl, subtractChunks + 1);
  parentController.seek(pendingSeek.targetTime);
@@ -12847,17 +12859,14 @@ var getAudioSampleFromCbr = ({
  const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
  const durationInSeconds = samplesPerFrame / sampleRate;
  const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
- const timestamp = Math.round(timeInSeconds * 1e6);
- const duration2 = Math.round(durationInSeconds * 1e6);
+ const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  duration: duration2,
  offset: initialOffset,
- timescale: 1e6,
  timestamp,
- trackId: 0,
  type: "key"
  };
  return { audioSample, timeInSeconds, durationInSeconds };
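Note: for constant-bitrate MP3s the sample's time derives purely from its byte position: the frame index is the offset into the media section divided by the average frame length, and each frame spans `samplesPerFrame / sampleRate` seconds. The only behavioral change in this hunk is that the sample is built directly in microseconds. A worked example (constants assume MPEG-1 Layer III at 44.1 kHz):

    const WEBCODECS_TIMESCALE = 1e6;
    const samplesPerFrame = 1152; // MPEG-1 Layer III
    const sampleRate = 44100;

    const nthFrame = 100;
    const timeInSeconds = (nthFrame * samplesPerFrame) / sampleRate; // ≈ 2.6122 s
    const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE); // 2612245 µs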
@@ -12894,17 +12903,14 @@ var getAudioSampleFromVbr = ({
  tableOfContents: info.xingData.tableOfContents
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
- const timestamp = Math.round(timeInSeconds * 1e6);
- const duration2 = Math.round(durationInSeconds * 1e6);
+ const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  duration: duration2,
  offset: position,
- timescale: 1e6,
  timestamp,
- trackId: 0,
  type: "key"
  };
  return { timeInSeconds, audioSample, durationInSeconds };
@@ -12977,9 +12983,10 @@ var parseMpegHeader = async ({
  description: undefined,
  numberOfChannels,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: 1e6,
  trackId: 0,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
  tracks: state.callbacks.tracks,
@@ -13016,7 +13023,10 @@
  offset: initialOffset,
  durationInSeconds
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  };

  // src/containers/mp3/seek/wait-until-syncword.ts
@@ -13092,11 +13102,12 @@ var getStrhForIndex = (structure, trackId) => {
  // src/containers/riff/convert-queued-sample-to-mediaparser-sample.ts
  var getKeyFrameOffsetAndPocs = ({
  state,
- sample
+ sample,
+ trackId
  }) => {
  if (sample.type === "key") {
  const sampleOffset = state.riff.sampleCounter.getSampleCountForTrack({
- trackId: sample.trackId
+ trackId
  });
  return {
  sampleOffsetAtKeyframe: sampleOffset,
@@ -13108,7 +13119,7 @@ var getKeyFrameOffsetAndPocs = ({
  if (!keyframeAtOffset) {
  throw new Error("no keyframe at offset");
  }
- const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[sample.trackId];
+ const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[trackId];
  const pocsAtKeyframeOffset = state.riff.sampleCounter.getPocAtKeyframeOffset({
  keyframeOffset: keyframeAtOffset.positionInBytes
  });
@@ -13117,12 +13128,17 @@
  pocsAtKeyframeOffset
  };
  };
- var convertQueuedSampleToMediaParserSample = (sample, state) => {
- const strh = getStrhForIndex(state.structure.getRiffStructure(), sample.trackId);
+ var convertQueuedSampleToMediaParserSample = ({
+ sample,
+ state,
+ trackId
+ }) => {
+ const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
  const samplesPerSecond = strh.rate / strh.scale;
  const { sampleOffsetAtKeyframe, pocsAtKeyframeOffset } = getKeyFrameOffsetAndPocs({
  sample,
- state
+ state,
+ trackId
  });
  const indexOfPoc = pocsAtKeyframeOffset.findIndex((poc) => poc === sample.avc?.poc);
  if (indexOfPoc === -1) {
@@ -13134,8 +13150,7 @@ var convertQueuedSampleToMediaParserSample = (sample, state) => {
  sample: {
  ...sample,
  timestamp,
- cts: timestamp,
- dts: timestamp
+ decodingTimestamp: timestamp
  },
  timescale: 1
  });
@@ -13545,10 +13560,8 @@ var handleChunk = async ({
  const rawSample = {
  data,
  duration: 1 / samplesPerSecond,
- trackId,
  type: keyOrDelta === "bidirectional" ? "delta" : keyOrDelta,
  offset,
- timescale: samplesPerSecond,
  avc: info
  };
  const maxFramesInBuffer = state.avc.getMaxFramesInBuffer();
@@ -13565,14 +13578,29 @@
  poc: info.poc
  });
  }
- state.riff.queuedBFrames.addFrame(rawSample, maxFramesInBuffer);
+ state.riff.queuedBFrames.addFrame({
+ frame: rawSample,
+ trackId,
+ maxFramesInBuffer,
+ timescale: samplesPerSecond
+ });
  const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
  if (!releasedFrame) {
  return;
  }
- const videoSample = convertQueuedSampleToMediaParserSample(releasedFrame, state);
- state.riff.sampleCounter.onVideoSample(videoSample);
- await state.callbacks.onVideoSample(trackId, videoSample);
+ const videoSample = convertQueuedSampleToMediaParserSample({
+ sample: releasedFrame.sample,
+ state,
+ trackId: releasedFrame.trackId
+ });
+ state.riff.sampleCounter.onVideoSample({
+ trackId,
+ videoSample
+ });
+ await state.callbacks.onVideoSample({
+ videoSample,
+ trackId
+ });
  }
  const audioChunk = ckId.match(/^([0-9]{2})wb$/);
  if (audioChunk) {
@@ -13587,24 +13615,21 @@
  trackId
  });
  const timeInSec = nthSample / samplesPerSecond;
- const timestamp = timeInSec;
+ const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE);
  const data = iterator.getSlice(ckSize);
- const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
- sample: {
- cts: timestamp,
- dts: timestamp,
- data,
- duration: undefined,
- timestamp,
- trackId,
- type: "key",
- offset,
- timescale: samplesPerSecond
- },
- timescale: 1
- });
+ const audioSample = {
+ decodingTimestamp: timestamp,
+ data,
+ duration: undefined,
+ timestamp,
+ type: "key",
+ offset
+ };
  state.riff.sampleCounter.onAudioSample(trackId, audioSample);
- await state.callbacks.onAudioSample(trackId, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId
+ });
  }
  };
  var parseMovi = async ({
@@ -13647,9 +13672,19 @@ var parseMediaSection = async (state) => {
  var parseRiffBody = async (state) => {
  const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
  if (releasedFrame) {
- const converted = convertQueuedSampleToMediaParserSample(releasedFrame, state);
- state.riff.sampleCounter.onVideoSample(converted);
- await state.callbacks.onVideoSample(releasedFrame.trackId, converted);
+ const converted = convertQueuedSampleToMediaParserSample({
+ sample: releasedFrame.sample,
+ state,
+ trackId: releasedFrame.trackId
+ });
+ state.riff.sampleCounter.onVideoSample({
+ trackId: releasedFrame.trackId,
+ videoSample: converted
+ });
+ await state.callbacks.onVideoSample({
+ videoSample: converted,
+ trackId: releasedFrame.trackId
+ });
  return null;
  }
  if (state.mediaSection.isCurrentByteInMediaSection(state.iterator) === "in-section") {
@@ -14027,13 +14062,14 @@ var handleAacPacket = async ({
  type: "audio",
  codecData: { type: "aac-config", data: codecPrivate2 },
  trackId: programId,
- timescale: MPEG_TIMESCALE,
+ originalTimescale: MPEG_TIMESCALE,
  codecEnum: "aac",
  codec: mapAudioObjectTypeToCodecString(audioObjectType),
  description: codecPrivate2,
  numberOfChannels: channelConfiguration,
  sampleRate,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  };
  await registerAudioTrack({
  track,
@@ -14045,21 +14081,21 @@
  });
  }
  const sample = {
- cts: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
- dts: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
+ decodingTimestamp: (streamBuffer.pesHeader.dts ?? streamBuffer.pesHeader.pts) - transportStream.startOffset.getOffset(programId),
  timestamp: streamBuffer.pesHeader.pts - transportStream.startOffset.getOffset(programId),
  duration: undefined,
  data: streamBuffer.getBuffer(),
- trackId: programId,
  type: "key",
- offset,
- timescale: MPEG_TIMESCALE
+ offset
  };
  const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
  sample,
  timescale: MPEG_TIMESCALE
  });
- await sampleCallbacks.onAudioSample(programId, audioSample);
+ await sampleCallbacks.onAudioSample({
+ audioSample,
+ trackId: programId
+ });
  transportStream.lastEmittedSample.setLastEmittedSample(sample);
  };

@@ -14550,9 +14586,10 @@ var parseFmt = async ({
  codecEnum: format,
  numberOfChannels,
  sampleRate,
- timescale: 1e6,
+ originalTimescale: 1e6,
  trackId: 0,
- startInSeconds: 0
+ startInSeconds: 0,
+ timescale: WEBCODECS_TIMESCALE
  },
  container: "wav",
  registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
@@ -14652,19 +14689,19 @@ var parseMediaSection2 = async ({
  const data = iterator.getSlice(toRead);
  const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
  sample: {
- cts: timestamp,
- dts: timestamp,
+ decodingTimestamp: timestamp,
  data,
  duration: duration2,
  timestamp,
- trackId: 0,
  type: "key",
- offset,
- timescale: 1e6
+ offset
  },
  timescale: 1
  });
- await state.callbacks.onAudioSample(0, audioSample);
+ await state.callbacks.onAudioSample({
+ audioSample,
+ trackId: 0
+ });
  return null;
  };

@@ -14837,19 +14874,18 @@ var getSampleFromBlock = async ({
  if (codec.startsWith("V_")) {
  const partialVideoSample = {
  data: iterator.getSlice(remainingNow),
- cts: timecodeInMicroseconds,
- dts: timecodeInMicroseconds,
+ decodingTimestamp: timecodeInMicroseconds,
  duration: undefined,
- trackId: trackNumber2,
  timestamp: timecodeInMicroseconds,
- offset,
- timescale
+ offset
  };
  if (keyframe === null) {
  iterator.destroy();
  return {
  type: "partial-video-sample",
- partialVideoSample
+ partialVideoSample,
+ trackId: trackNumber2,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  await addAvcToTrackAndActivateTrackIfNecessary({
@@ -14870,25 +14906,26 @@
  iterator.destroy();
  return {
  type: "video-sample",
- videoSample: sample
+ videoSample: sample,
+ trackId: trackNumber2,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  if (codec.startsWith("A_")) {
  const audioSample = {
  data: iterator.getSlice(remainingNow),
- trackId: trackNumber2,
  timestamp: timecodeInMicroseconds,
  type: "key",
  duration: undefined,
- cts: timecodeInMicroseconds,
- dts: timecodeInMicroseconds,
- offset,
- timescale
+ decodingTimestamp: timecodeInMicroseconds,
+ offset
  };
  iterator.destroy();
  return {
  type: "audio-sample",
- audioSample
+ audioSample,
+ trackId: trackNumber2,
+ timescale: WEBCODECS_TIMESCALE
  };
  }
  iterator.destroy();
@@ -15049,7 +15086,10 @@ var postprocessEbml = async ({
  avcState
  });
  if (sample.type === "video-sample") {
- await callbacks.onVideoSample(sample.videoSample.trackId, sample.videoSample);
+ await callbacks.onVideoSample({
+ videoSample: sample.videoSample,
+ trackId: sample.trackId
+ });
  return {
  type: "Block",
  value: new Uint8Array([]),
@@ -15057,7 +15097,10 @@
  };
  }
  if (sample.type === "audio-sample") {
- await callbacks.onAudioSample(sample.audioSample.trackId, sample.audioSample);
+ await callbacks.onAudioSample({
+ audioSample: sample.audioSample,
+ trackId: sample.trackId
+ });
  return {
  type: "Block",
  value: new Uint8Array([]),
@@ -15093,7 +15136,10 @@
  ...sample.partialVideoSample,
  type: hasReferenceBlock ? "delta" : "key"
  };
- await callbacks.onVideoSample(sample.partialVideoSample.trackId, completeFrame);
+ await callbacks.onVideoSample({
+ videoSample: completeFrame,
+ trackId: sample.trackId
+ });
  }
  return {
  type: "BlockGroup",
@@ -16146,7 +16192,7 @@ var sampleSorter = ({
  if (!callback) {
  throw new Error("No callback found for audio sample");
  }
- latestSample[src] = sample.dts;
+ latestSample[src] = sample.decodingTimestamp;
  await callback(sample);
  },
  addVideoSample: async (src, sample) => {
@@ -16154,7 +16200,7 @@
  if (!callback) {
  throw new Error("No callback found for video sample.");
  }
- latestSample[src] = sample.dts;
+ latestSample[src] = sample.decodingTimestamp;
  await callback(sample);
  },
  getNextStreamToRun: (streams) => {
@@ -16649,7 +16695,10 @@ var fetchIdx1 = async ({
  logLevel,
  prefetchCache
  });
- const iterator = getArrayBufferIterator(new Uint8Array, Infinity);
+ if (result.contentLength === null) {
+ throw new Error("Content length is null");
+ }
+ const iterator = getArrayBufferIterator(new Uint8Array, result.contentLength - position + 1);
  while (true) {
  const res = await result.reader.reader.read();
  if (res.value) {
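Note: two fixes in one hunk: the response's `contentLength` is now required (a null length would make the byte math meaningless), and the iterator is sized to the bytes remaining after `position` instead of `Infinity`, consistent with the other `getArrayBufferIterator` call sites in this release. A sketch of the sizing logic (names mirror the hunk; simplified):

    const iteratorBound = (contentLength: number | null, position: number): number => {
      if (contentLength === null) {
        throw new Error('Content length is null'); // the range math needs a known size
      }
      return contentLength - position + 1; // bytes from `position` to the end
    };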
@@ -16749,13 +16798,18 @@ var queuedBFramesState = () => {
  queuedFrames.length = 0;
  };
  return {
- addFrame: (frame, maxFramesInBuffer) => {
+ addFrame: ({
+ frame,
+ maxFramesInBuffer,
+ trackId,
+ timescale
+ }) => {
  if (frame.type === "key") {
  flush();
- releasedFrames.push(frame);
+ releasedFrames.push({ sample: frame, trackId, timescale });
  return;
  }
- queuedFrames.push(frame);
+ queuedFrames.push({ sample: frame, trackId, timescale });
  if (queuedFrames.length > maxFramesInBuffer) {
  releasedFrames.push(queuedFrames.shift());
  }
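Note: AVI streams can contain B-frames, which arrive in decode order, so delta frames are held back until more than `maxFramesInBuffer` are pending (or a keyframe flushes the queue). Since samples no longer carry `trackId`/`timescale`, each queue entry is widened to `{sample, trackId, timescale}` so `getReleasedFrame()` can hand that context back with the frame. The contract, sketched with illustrative types (not the library's):

    type QueuedSample = {type: 'key' | 'delta'};
    type QueueEntry = {sample: QueuedSample; trackId: number; timescale: number};

    declare const queuedBFrames: {
      addFrame: (args: {
        frame: QueuedSample;
        trackId: number;
        timescale: number;
        maxFramesInBuffer: number;
      }) => void;
      // Returns the oldest frame that may now be emitted, if any.
      getReleasedFrame: () => QueueEntry | undefined;
    };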
@@ -16816,22 +16870,25 @@ var riffSampleCounter = () => {
  }
  samplesForTrack[trackId]++;
  };
- const onVideoSample = (videoSample) => {
- if (typeof samplesForTrack[videoSample.trackId] === "undefined") {
- samplesForTrack[videoSample.trackId] = 0;
+ const onVideoSample = ({
+ trackId,
+ videoSample
+ }) => {
+ if (typeof samplesForTrack[trackId] === "undefined") {
+ samplesForTrack[trackId] = 0;
  }
  if (videoSample.type === "key") {
  riffKeys.addKeyframe({
- trackId: videoSample.trackId,
- decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
+ trackId,
+ decodingTimeInSeconds: videoSample.decodingTimestamp / WEBCODECS_TIMESCALE,
  positionInBytes: videoSample.offset,
- presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
+ presentationTimeInSeconds: videoSample.timestamp / WEBCODECS_TIMESCALE,
  sizeInBytes: videoSample.data.length,
  sampleCounts: { ...samplesForTrack }
  });
  }
  if (videoSample.data.length > 0) {
- samplesForTrack[videoSample.trackId]++;
+ samplesForTrack[trackId]++;
  }
  };
  const getSampleCountForTrack = ({ trackId }) => {
@@ -16963,7 +17020,10 @@ var callbacksState = ({
  }
  queuedVideoSamples[id] = [];
  },
- onAudioSample: async (trackId, audioSample) => {
+ onAudioSample: async ({
+ audioSample,
+ trackId
+ }) => {
  if (controller._internals.signal.aborted) {
  throw new Error("Aborted");
  }
@@ -16981,7 +17041,10 @@
  samplesObserved.addAudioSample(audioSample);
  }
  },
- onVideoSample: async (trackId, videoSample) => {
+ onVideoSample: async ({
+ trackId,
+ videoSample
+ }) => {
  if (controller._internals.signal.aborted) {
  throw new Error("Aborted");
  }
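Note: this is the central definition of the new callback contract: `onAudioSample` and `onVideoSample` now take a single options object instead of positional `(trackId, sample)` arguments, because samples no longer carry `trackId` or `timescale` themselves. Every container call site above was updated to match. Illustrative signatures (type names are assumptions, not the published ones):

    type OnVideoSample = (args: {
      videoSample: {timestamp: number; decodingTimestamp: number; data: Uint8Array};
      trackId: number;
    }) => Promise<void>;

    type OnAudioSample = (args: {
      audioSample: {timestamp: number; decodingTimestamp: number; data: Uint8Array};
      trackId: number;
    }) => Promise<void>;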
@@ -16998,9 +17061,9 @@ var callbacksState = ({
  if (videoSample.type === "key") {
  keyframes.addKeyframe({
  trackId,
- decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
+ decodingTimeInSeconds: videoSample.decodingTimestamp / WEBCODECS_TIMESCALE,
  positionInBytes: videoSample.offset,
- presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
+ presentationTimeInSeconds: videoSample.timestamp / WEBCODECS_TIMESCALE,
  sizeInBytes: videoSample.data.length
  });
  }
@@ -17055,9 +17118,9 @@ var samplesObservedState = () => {
  return largestSample - smallestSample;
  };
  const addVideoSample = (videoSample) => {
- videoSamples.set(videoSample.cts, videoSample.data.byteLength);
- const presentationTimeInSeconds = videoSample.cts / videoSample.timescale;
- const duration2 = (videoSample.duration ?? 0) / videoSample.timescale;
+ videoSamples.set(videoSample.timestamp, videoSample.data.byteLength);
+ const presentationTimeInSeconds = videoSample.timestamp / WEBCODECS_TIMESCALE;
+ const duration2 = (videoSample.duration ?? 0) / WEBCODECS_TIMESCALE;
  if (largestVideoSample === undefined || presentationTimeInSeconds > largestVideoSample) {
  largestVideoSample = presentationTimeInSeconds + duration2;
  }
@@ -17066,9 +17129,9 @@
  }
  };
  const addAudioSample = (audioSample) => {
- audioSamples.set(audioSample.cts, audioSample.data.byteLength);
- const presentationTimeInSeconds = audioSample.cts / audioSample.timescale;
- const duration2 = (audioSample.duration ?? 0) / audioSample.timescale;
+ audioSamples.set(audioSample.timestamp, audioSample.data.byteLength);
+ const presentationTimeInSeconds = audioSample.timestamp / WEBCODECS_TIMESCALE;
+ const duration2 = (audioSample.duration ?? 0) / WEBCODECS_TIMESCALE;
  if (largestAudioSample === undefined || presentationTimeInSeconds > largestAudioSample) {
  largestAudioSample = presentationTimeInSeconds + duration2;
  }
@@ -17436,6 +17499,9 @@ var internalParseMedia = async function({
  seekingHints,
  ...more
  }) {
+ if (!src) {
+ throw new Error('No "src" provided');
+ }
  controller._internals.markAsReadyToEmitEvents();
  warnIfRemotionLicenseNotAcknowledged({
  acknowledgeRemotionLicense,
@@ -17648,7 +17714,7 @@ var post = (message) => {
  };
  var controller = mediaParserController();
  var executeCallback = (payload) => {
- const nonce = crypto.randomUUID();
+ const nonce = String(Math.random());
  const { promise, resolve, reject } = withResolvers();
  const cb = (msg) => {
  const data = msg.data;
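Note: `crypto.randomUUID()` is only exposed in secure contexts (HTTPS/localhost), so the worker entry would throw on a page served over plain HTTP. The nonce here only correlates a postMessage request with its response, so it needs uniqueness within one worker session rather than cryptographic strength, which makes `String(Math.random())` the portable choice. Sketch (rationale inferred; helper name hypothetical):

    // Good enough for matching request/response pairs across postMessage;
    // not suitable for anything security-sensitive.
    const makeNonce = (): string => String(Math.random());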