@remotion/media-parser 4.0.305 → 4.0.308

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,9 +1,9 @@
  import type { MediaParserController } from '../../controller/media-parser-controller';
  import type { M3uState } from '../../state/m3u-state';
- import type { MediaParserVideoSample } from '../../webcodec-sample-types';
+ import type { MediaParserOnVideoSample, MediaParserVideoSample } from '../../webcodec-sample-types';
  export declare const considerSeekBasedOnChunk: ({ sample, parentController, childController, callback, m3uState, playlistUrl, subtractChunks, chunkIndex, }: {
  sample: MediaParserVideoSample;
- callback: (sample: MediaParserVideoSample) => void | Promise<void>;
+ callback: MediaParserOnVideoSample;
  parentController: MediaParserController;
  childController: MediaParserController;
  playlistUrl: string;
@@ -18,8 +18,10 @@ const getAudioSampleFromCbr = ({ bitrateInKbit, initialOffset, layer, sampleRate
  avgLength);
  const durationInSeconds = samplesPerFrame / sampleRate;
  const timeInSeconds = (nthFrame * samplesPerFrame) / sampleRate;
- const timestamp = Math.round(timeInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
- const duration = Math.round(durationInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+ // Important that we round down, otherwise WebCodecs might stall, e.g.
+ // Last input = 30570667 Last output = 30570666 -> stuck
+ const timestamp = Math.floor(timeInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+ const duration = Math.floor(durationInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
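A minimal sketch (not from the package) of why rounding down matters here, assuming WEBCODECS_TIMESCALE is 1_000_000 (WebCodecs timestamps are in microseconds): rounding can lift the computed input timestamp one tick above the value the decoder later reports back, so code that waits for an output timestamp equal to the last input timestamp never resolves, while flooring keeps the computed value at or below it.

    const WEBCODECS_TIMESCALE = 1_000_000; // assumed value, for illustration only
    const timeInSeconds = 30.5706668; // hypothetical last-sample time
    Math.round(timeInSeconds * WEBCODECS_TIMESCALE); // 30570667 - one above what the decoder emits
    Math.floor(timeInSeconds * WEBCODECS_TIMESCALE); // 30570666 - matches the decoder's last output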
@@ -30,8 +30,8 @@ const getAudioSampleFromVbr = ({ info, position, mp3Info, data, }) => {
  tableOfContents: info.xingData.tableOfContents,
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
- const timestamp = Math.round(timeInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
- const duration = Math.round(durationInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+ const duration = Math.floor(durationInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -86,7 +86,7 @@ const handleChunk = async ({ state, ckId, ckSize, }) => {
  trackId,
  });
  const timeInSec = nthSample / samplesPerSecond;
- const timestamp = Math.round(timeInSec * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSec * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
  const data = iterator.getSlice(ckSize);
  const audioSample = {
  decodingTimestamp: timestamp,
@@ -3,13 +3,14 @@ import type { PrefetchCache } from '../../../fetch';
  import { type MediaParserLogLevel } from '../../../log';
  import type { ParseMediaSrc } from '../../../options';
  import type { MediaParserReaderInterface } from '../../../readers/reader';
- export declare const fetchIdx1: ({ src, readerInterface, controller, position, logLevel, prefetchCache, }: {
+ export declare const fetchIdx1: ({ src, readerInterface, controller, position, logLevel, prefetchCache, contentLength, }: {
  src: ParseMediaSrc;
  readerInterface: MediaParserReaderInterface;
  controller: MediaParserController;
  position: number;
  logLevel: MediaParserLogLevel;
  prefetchCache: PrefetchCache;
+ contentLength: number;
  }) => Promise<{
  entries: import("../riff-box").Idx1Entry[];
  videoTrackIndex: number | null;
@@ -4,7 +4,7 @@ exports.fetchIdx1 = void 0;
  const buffer_iterator_1 = require("../../../iterator/buffer-iterator");
  const log_1 = require("../../../log");
  const expect_riff_box_1 = require("../expect-riff-box");
- const fetchIdx1 = async ({ src, readerInterface, controller, position, logLevel, prefetchCache, }) => {
+ const fetchIdx1 = async ({ src, readerInterface, controller, position, logLevel, prefetchCache, contentLength, }) => {
  log_1.Log.verbose(logLevel, 'Making request to fetch idx1 from ', src, 'position', position);
  const result = await readerInterface.read({
  controller,
@@ -16,7 +16,7 @@ const fetchIdx1 = async ({ src, readerInterface, controller, position, logLevel,
  if (result.contentLength === null) {
  throw new Error('Content length is null');
  }
- const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(new Uint8Array(), result.contentLength - position + 1);
+ const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(new Uint8Array(), contentLength - position + 1);
  while (true) {
  const res = await result.reader.reader.read();
  if (res.value) {
@@ -28,9 +28,12 @@ const parseWav = (state) => {
  if (type === 'LIST') {
  return (0, parse_list_1.parseList)({ state });
  }
- if (type === 'id3') {
+ if (type === 'id3' || type === 'ID3') {
  return (0, parse_id3_1.parseId3)({ state });
  }
+ if (type === '\u0000') {
+ return Promise.resolve(null);
+ }
  throw new Error(`Unknown WAV box type ${type}`);
  };
  exports.parseWav = parseWav;
@@ -1295,7 +1295,7 @@ var getArrayBufferIterator = (initialData, maxBytes) => {
  };
  const getSlice = (amount) => {
  const value = uintArray.slice(counter.getDiscardedOffset(), counter.getDiscardedOffset() + amount);
- counter.increment(amount);
+ counter.increment(value.length);
  return value;
  };
  const discard = (length) => {
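A brief sketch (hypothetical values, not from the package) of what the getSlice change above fixes: Uint8Array.prototype.slice clamps at the end of the buffer, so near the end it can return fewer bytes than requested; incrementing the counter by the requested amount would advance the read offset past the data actually returned, whereas value.length keeps the counter and the returned data in sync.

    const buf = new Uint8Array([1, 2, 3, 4, 5]);
    const offset = 3;
    const requested = 4;
    const value = buf.slice(offset, offset + requested); // Uint8Array [4, 5] - only 2 bytes remain
    // counter.increment(requested) would overshoot by 2; counter.increment(value.length) does not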
@@ -13251,8 +13251,8 @@ var getAudioSampleFromCbr = ({
  const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
  const durationInSeconds = samplesPerFrame / sampleRate;
  const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
- const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
- const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.floor(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -13295,8 +13295,8 @@ var getAudioSampleFromVbr = ({
  tableOfContents: info.xingData.tableOfContents
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
- const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
- const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.floor(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -14007,7 +14007,7 @@ var handleChunk = async ({
  trackId
  });
  const timeInSec = nthSample / samplesPerSecond;
- const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSec * WEBCODECS_TIMESCALE);
  const data = iterator.getSlice(ckSize);
  const audioSample = {
  decodingTimestamp: timestamp,
@@ -15118,9 +15118,12 @@ var parseWav = (state) => {
  if (type === "LIST") {
  return parseList({ state });
  }
- if (type === "id3") {
+ if (type === "id3" || type === "ID3") {
  return parseId32({ state });
  }
+ if (type === "\x00") {
+ return Promise.resolve(null);
+ }
  throw new Error(`Unknown WAV box type ${type}`);
  };

@@ -15625,6 +15628,7 @@ var parseLoop = async ({
  }
  }
  state.samplesObserved.setLastSampleObserved();
+ await state.callbacks.callTracksDoneCallback();
  if (state.controller._internals.seekSignal.getSeek() !== null) {
  Log.verbose(state.logLevel, "Reached end of samples, but there is a pending seek. Trying to seek...");
  await workOnSeekRequest(getWorkOnSeekRequestOptions(state));
@@ -16688,7 +16692,8 @@ var fetchIdx1 = async ({
  controller,
  position,
  logLevel,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  Log.verbose(logLevel, "Making request to fetch idx1 from ", src, "position", position);
  const result = await readerInterface.read({
@@ -16701,7 +16706,7 @@ var fetchIdx1 = async ({
  if (result.contentLength === null) {
  throw new Error("Content length is null");
  }
- const iterator = getArrayBufferIterator(new Uint8Array, result.contentLength - position + 1);
+ const iterator = getArrayBufferIterator(new Uint8Array, contentLength - position + 1);
  while (true) {
  const res = await result.reader.reader.read();
  if (res.value) {
@@ -16731,7 +16736,8 @@ var lazyIdx1Fetch = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  let prom = null;
  let result = null;
@@ -16748,7 +16754,8 @@ var lazyIdx1Fetch = ({
  position,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }).then((entries) => {
  prom = null;
  result = entries;
@@ -16944,7 +16951,8 @@ var riffSpecificState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  let avcProfile = null;
  let nextTrackIndex = 0;
@@ -16964,7 +16972,8 @@ var riffSpecificState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  });
  const sampleCounter = riffSampleCounter();
  const queuedBFrames = queuedBFramesState();
@@ -17002,6 +17011,7 @@ var callbacksState = ({
  }) => {
  const videoSampleCallbacks = {};
  const audioSampleCallbacks = {};
+ const onTrackDoneCallback = {};
  const queuedAudioSamples = {};
  const queuedVideoSamples = {};
  const canSkipTracksState = makeCanSkipTracksState({
@@ -17036,7 +17046,8 @@ var callbacksState = ({
  if (seekSignal.getSeek() !== null) {
  Log.trace(logLevel, "Not emitting sample because seek is processing");
  } else {
- await callback(audioSample);
+ const trackDoneCallback = await callback(audioSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback ?? null;
  }
  }
  }
@@ -17057,7 +17068,8 @@ var callbacksState = ({
  if (seekSignal.getSeek() !== null) {
  Log.trace(logLevel, "Not emitting sample because seek is processing");
  } else {
- await callback(videoSample);
+ const trackDoneCallback = await callback(videoSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback ?? null;
  }
  }
  }
@@ -17093,7 +17105,14 @@ var callbacksState = ({
  audioSampleCallbacks,
  videoSampleCallbacks,
  hasAudioTrackHandlers,
- hasVideoTrackHandlers
+ hasVideoTrackHandlers,
+ callTracksDoneCallback: async () => {
+ for (const callback of Object.values(onTrackDoneCallback)) {
+ if (callback) {
+ await callback();
+ }
+ }
+ }
  };
  };

@@ -17345,7 +17364,8 @@ var makeParserState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }),
  transportStream: transportStreamState(),
  webm: webmState({
@@ -17679,7 +17699,7 @@ var downloadAndParseMedia = async (options) => {
  return returnValue;
  };
  // src/version.ts
- var VERSION = "4.0.305";
+ var VERSION = "4.0.308";

  // src/index.ts
  var MediaParserInternals = {
@@ -2556,7 +2556,7 @@ var getArrayBufferIterator = (initialData, maxBytes) => {
  };
  const getSlice = (amount) => {
  const value = uintArray.slice(counter.getDiscardedOffset(), counter.getDiscardedOffset() + amount);
- counter.increment(amount);
+ counter.increment(value.length);
  return value;
  };
  const discard = (length) => {
@@ -12990,8 +12990,8 @@ var getAudioSampleFromCbr = ({
  const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
  const durationInSeconds = samplesPerFrame / sampleRate;
  const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
- const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
- const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.floor(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -13034,8 +13034,8 @@ var getAudioSampleFromVbr = ({
  tableOfContents: info.xingData.tableOfContents
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
- const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
- const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.floor(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -13746,7 +13746,7 @@ var handleChunk = async ({
  trackId
  });
  const timeInSec = nthSample / samplesPerSecond;
- const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSec * WEBCODECS_TIMESCALE);
  const data = iterator.getSlice(ckSize);
  const audioSample = {
  decodingTimestamp: timestamp,
@@ -14857,9 +14857,12 @@ var parseWav = (state) => {
  if (type === "LIST") {
  return parseList({ state });
  }
- if (type === "id3") {
+ if (type === "id3" || type === "ID3") {
  return parseId32({ state });
  }
+ if (type === "\x00") {
+ return Promise.resolve(null);
+ }
  throw new Error(`Unknown WAV box type ${type}`);
  };

@@ -15753,6 +15756,7 @@ var parseLoop = async ({
  }
  }
  state.samplesObserved.setLastSampleObserved();
+ await state.callbacks.callTracksDoneCallback();
  if (state.controller._internals.seekSignal.getSeek() !== null) {
  Log.verbose(state.logLevel, "Reached end of samples, but there is a pending seek. Trying to seek...");
  await workOnSeekRequest(getWorkOnSeekRequestOptions(state));
@@ -16816,7 +16820,8 @@ var fetchIdx1 = async ({
  controller,
  position,
  logLevel,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  Log.verbose(logLevel, "Making request to fetch idx1 from ", src, "position", position);
  const result = await readerInterface.read({
@@ -16829,7 +16834,7 @@ var fetchIdx1 = async ({
  if (result.contentLength === null) {
  throw new Error("Content length is null");
  }
- const iterator = getArrayBufferIterator(new Uint8Array, result.contentLength - position + 1);
+ const iterator = getArrayBufferIterator(new Uint8Array, contentLength - position + 1);
  while (true) {
  const res = await result.reader.reader.read();
  if (res.value) {
@@ -16859,7 +16864,8 @@ var lazyIdx1Fetch = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  let prom = null;
  let result = null;
@@ -16876,7 +16882,8 @@ var lazyIdx1Fetch = ({
  position,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }).then((entries) => {
  prom = null;
  result = entries;
@@ -17072,7 +17079,8 @@ var riffSpecificState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  let avcProfile = null;
  let nextTrackIndex = 0;
@@ -17092,7 +17100,8 @@ var riffSpecificState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  });
  const sampleCounter = riffSampleCounter();
  const queuedBFrames = queuedBFramesState();
@@ -17130,6 +17139,7 @@ var callbacksState = ({
  }) => {
  const videoSampleCallbacks = {};
  const audioSampleCallbacks = {};
+ const onTrackDoneCallback = {};
  const queuedAudioSamples = {};
  const queuedVideoSamples = {};
  const canSkipTracksState = makeCanSkipTracksState({
@@ -17164,7 +17174,8 @@ var callbacksState = ({
  if (seekSignal.getSeek() !== null) {
  Log.trace(logLevel, "Not emitting sample because seek is processing");
  } else {
- await callback(audioSample);
+ const trackDoneCallback = await callback(audioSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback ?? null;
  }
  }
  }
@@ -17185,7 +17196,8 @@ var callbacksState = ({
  if (seekSignal.getSeek() !== null) {
  Log.trace(logLevel, "Not emitting sample because seek is processing");
  } else {
- await callback(videoSample);
+ const trackDoneCallback = await callback(videoSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback ?? null;
  }
  }
  }
@@ -17221,7 +17233,14 @@ var callbacksState = ({
  audioSampleCallbacks,
  videoSampleCallbacks,
  hasAudioTrackHandlers,
- hasVideoTrackHandlers
+ hasVideoTrackHandlers,
+ callTracksDoneCallback: async () => {
+ for (const callback of Object.values(onTrackDoneCallback)) {
+ if (callback) {
+ await callback();
+ }
+ }
+ }
  };
  };

@@ -17473,7 +17492,8 @@ var makeParserState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }),
  transportStream: transportStreamState(),
  webm: webmState({
@@ -2453,7 +2453,7 @@ var getArrayBufferIterator = (initialData, maxBytes) => {
  };
  const getSlice = (amount) => {
  const value = uintArray.slice(counter.getDiscardedOffset(), counter.getDiscardedOffset() + amount);
- counter.increment(amount);
+ counter.increment(value.length);
  return value;
  };
  const discard = (length) => {
@@ -12859,8 +12859,8 @@ var getAudioSampleFromCbr = ({
  const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
  const durationInSeconds = samplesPerFrame / sampleRate;
  const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
- const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
- const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.floor(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -12903,8 +12903,8 @@ var getAudioSampleFromVbr = ({
  tableOfContents: info.xingData.tableOfContents
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
- const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);
- const duration2 = Math.round(durationInSeconds * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSeconds * WEBCODECS_TIMESCALE);
+ const duration2 = Math.floor(durationInSeconds * WEBCODECS_TIMESCALE);
  const audioSample = {
  data,
  decodingTimestamp: timestamp,
@@ -13615,7 +13615,7 @@ var handleChunk = async ({
  trackId
  });
  const timeInSec = nthSample / samplesPerSecond;
- const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE);
+ const timestamp = Math.floor(timeInSec * WEBCODECS_TIMESCALE);
  const data = iterator.getSlice(ckSize);
  const audioSample = {
  decodingTimestamp: timestamp,
@@ -14726,9 +14726,12 @@ var parseWav = (state) => {
  if (type === "LIST") {
  return parseList({ state });
  }
- if (type === "id3") {
+ if (type === "id3" || type === "ID3") {
  return parseId32({ state });
  }
+ if (type === "\x00") {
+ return Promise.resolve(null);
+ }
  throw new Error(`Unknown WAV box type ${type}`);
  };

@@ -15622,6 +15625,7 @@ var parseLoop = async ({
  }
  }
  state.samplesObserved.setLastSampleObserved();
+ await state.callbacks.callTracksDoneCallback();
  if (state.controller._internals.seekSignal.getSeek() !== null) {
  Log.verbose(state.logLevel, "Reached end of samples, but there is a pending seek. Trying to seek...");
  await workOnSeekRequest(getWorkOnSeekRequestOptions(state));
@@ -16685,7 +16689,8 @@ var fetchIdx1 = async ({
  controller,
  position,
  logLevel,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  Log.verbose(logLevel, "Making request to fetch idx1 from ", src, "position", position);
  const result = await readerInterface.read({
@@ -16698,7 +16703,7 @@ var fetchIdx1 = async ({
  if (result.contentLength === null) {
  throw new Error("Content length is null");
  }
- const iterator = getArrayBufferIterator(new Uint8Array, result.contentLength - position + 1);
+ const iterator = getArrayBufferIterator(new Uint8Array, contentLength - position + 1);
  while (true) {
  const res = await result.reader.reader.read();
  if (res.value) {
@@ -16728,7 +16733,8 @@ var lazyIdx1Fetch = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  let prom = null;
  let result = null;
@@ -16745,7 +16751,8 @@ var lazyIdx1Fetch = ({
  position,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }).then((entries) => {
  prom = null;
  result = entries;
@@ -16941,7 +16948,8 @@ var riffSpecificState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }) => {
  let avcProfile = null;
  let nextTrackIndex = 0;
@@ -16961,7 +16969,8 @@ var riffSpecificState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  });
  const sampleCounter = riffSampleCounter();
  const queuedBFrames = queuedBFramesState();
@@ -16999,6 +17008,7 @@ var callbacksState = ({
  }) => {
  const videoSampleCallbacks = {};
  const audioSampleCallbacks = {};
+ const onTrackDoneCallback = {};
  const queuedAudioSamples = {};
  const queuedVideoSamples = {};
  const canSkipTracksState = makeCanSkipTracksState({
@@ -17033,7 +17043,8 @@ var callbacksState = ({
  if (seekSignal.getSeek() !== null) {
  Log.trace(logLevel, "Not emitting sample because seek is processing");
  } else {
- await callback(audioSample);
+ const trackDoneCallback = await callback(audioSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback ?? null;
  }
  }
  }
@@ -17054,7 +17065,8 @@ var callbacksState = ({
  if (seekSignal.getSeek() !== null) {
  Log.trace(logLevel, "Not emitting sample because seek is processing");
  } else {
- await callback(videoSample);
+ const trackDoneCallback = await callback(videoSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback ?? null;
  }
  }
  }
@@ -17090,7 +17102,14 @@ var callbacksState = ({
  audioSampleCallbacks,
  videoSampleCallbacks,
  hasAudioTrackHandlers,
- hasVideoTrackHandlers
+ hasVideoTrackHandlers,
+ callTracksDoneCallback: async () => {
+ for (const callback of Object.values(onTrackDoneCallback)) {
+ if (callback) {
+ await callback();
+ }
+ }
+ }
  };
  };

@@ -17342,7 +17361,8 @@ var makeParserState = ({
  logLevel,
  readerInterface,
  src,
- prefetchCache
+ prefetchCache,
+ contentLength
  }),
  transportStream: transportStreamState(),
  webm: webmState({
package/dist/index.d.ts CHANGED
@@ -1051,6 +1051,7 @@ export declare const MediaParserInternals: {
  videoSampleCallbacks: Record<number, import("./webcodec-sample-types").MediaParserOnVideoSample>;
  hasAudioTrackHandlers: boolean;
  hasVideoTrackHandlers: boolean;
+ callTracksDoneCallback: () => Promise<void>;
  };
  getInternalStats: () => import("./state/parser-state").InternalStats;
  getSkipBytes: () => number;
@@ -20,7 +20,7 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
  };
  const getSlice = (amount) => {
  const value = uintArray.slice(counter.getDiscardedOffset(), counter.getDiscardedOffset() + amount);
- counter.increment(amount);
+ counter.increment(value.length);
  return value;
  };
  const discard = (length) => {
@@ -117,6 +117,7 @@ const parseLoop = async ({ state, throttledState, onError, }) => {
  }
  }
  state.samplesObserved.setLastSampleObserved();
+ await state.callbacks.callTracksDoneCallback();
  // After the last sample, you might queue a last seek again.
  if (state.controller._internals.seekSignal.getSeek() !== null) {
  log_1.Log.verbose(state.logLevel, 'Reached end of samples, but there is a pending seek. Trying to seek...');
@@ -319,6 +319,7 @@ export declare const makeParserState: ({ hasAudioTrackHandlers, hasVideoTrackHan
  videoSampleCallbacks: Record<number, import("../webcodec-sample-types").MediaParserOnVideoSample>;
  hasAudioTrackHandlers: boolean;
  hasVideoTrackHandlers: boolean;
+ callTracksDoneCallback: () => Promise<void>;
  };
  getInternalStats: () => InternalStats;
  getSkipBytes: () => number;
@@ -62,6 +62,7 @@ const makeParserState = ({ hasAudioTrackHandlers, hasVideoTrackHandlers, control
  readerInterface,
  src,
  prefetchCache,
+ contentLength,
  }),
  transportStream: (0, transport_stream_1.transportStreamState)(),
  webm: (0, webm_1.webmState)({
@@ -4,12 +4,13 @@ import type { PrefetchCache } from '../../fetch';
  import type { MediaParserLogLevel } from '../../log';
  import type { ParseMediaSrc } from '../../options';
  import type { MediaParserReaderInterface } from '../../readers/reader';
- export declare const lazyIdx1Fetch: ({ controller, logLevel, readerInterface, src, prefetchCache, }: {
+ export declare const lazyIdx1Fetch: ({ controller, logLevel, readerInterface, src, prefetchCache, contentLength, }: {
  controller: MediaParserController;
  logLevel: MediaParserLogLevel;
  readerInterface: MediaParserReaderInterface;
  src: ParseMediaSrc;
  prefetchCache: PrefetchCache;
+ contentLength: number;
  }) => {
  triggerLoad: (position: number) => Promise<{
  entries: import("../../containers/riff/riff-box").Idx1Entry[];
@@ -2,7 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.lazyIdx1Fetch = void 0;
  const fetch_idx1_1 = require("../../containers/riff/seek/fetch-idx1");
- const lazyIdx1Fetch = ({ controller, logLevel, readerInterface, src, prefetchCache, }) => {
+ const lazyIdx1Fetch = ({ controller, logLevel, readerInterface, src, prefetchCache, contentLength, }) => {
  let prom = null;
  let result = null;
  const triggerLoad = (position) => {
@@ -19,6 +19,7 @@ const lazyIdx1Fetch = ({ controller, logLevel, readerInterface, src, prefetchCac
  readerInterface,
  src,
  prefetchCache,
+ contentLength,
  }).then((entries) => {
  prom = null;
  result = entries;
@@ -5,12 +5,13 @@ import type { ParseMediaSrc } from '../options';
  import type { MediaParserReaderInterface } from '../readers/reader';
  import type { SpsAndPps } from './parser-state';
  type AvcProfileInfoCallback = (profile: SpsAndPps) => Promise<void>;
- export declare const riffSpecificState: ({ controller, logLevel, readerInterface, src, prefetchCache, }: {
+ export declare const riffSpecificState: ({ controller, logLevel, readerInterface, src, prefetchCache, contentLength, }: {
  controller: MediaParserController;
  logLevel: MediaParserLogLevel;
  readerInterface: MediaParserReaderInterface;
  src: ParseMediaSrc;
  prefetchCache: PrefetchCache;
+ contentLength: number;
  }) => {
  getAvcProfile: () => SpsAndPps | null;
  onProfile: (profile: SpsAndPps) => Promise<void>;
@@ -4,7 +4,7 @@ exports.riffSpecificState = void 0;
  const lazy_idx1_fetch_1 = require("./riff/lazy-idx1-fetch");
  const queued_frames_1 = require("./riff/queued-frames");
  const sample_counter_1 = require("./riff/sample-counter");
- const riffSpecificState = ({ controller, logLevel, readerInterface, src, prefetchCache, }) => {
+ const riffSpecificState = ({ controller, logLevel, readerInterface, src, prefetchCache, contentLength, }) => {
  let avcProfile = null;
  let nextTrackIndex = 0;
  const profileCallbacks = [];
@@ -24,6 +24,7 @@ const riffSpecificState = ({ controller, logLevel, readerInterface, src, prefetc
  readerInterface,
  src,
  prefetchCache,
+ contentLength,
  });
  const sampleCounter = (0, sample_counter_1.riffSampleCounter)();
  const queuedBFrames = (0, queued_frames_1.queuedBFramesState)();
@@ -46,5 +46,6 @@ export declare const callbacksState: ({ controller, hasAudioTrackHandlers, hasVi
  videoSampleCallbacks: Record<number, MediaParserOnVideoSample>;
  hasAudioTrackHandlers: boolean;
  hasVideoTrackHandlers: boolean;
+ callTracksDoneCallback: () => Promise<void>;
  };
  export type CallbacksState = ReturnType<typeof callbacksState>;
@@ -9,6 +9,7 @@ const need_samples_for_fields_1 = require("./need-samples-for-fields");
  const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandlers, fields, keyframes, emittedFields, samplesObserved, structure, src, seekSignal, logLevel, }) => {
  const videoSampleCallbacks = {};
  const audioSampleCallbacks = {};
+ const onTrackDoneCallback = {};
  const queuedAudioSamples = {};
  const queuedVideoSamples = {};
  const canSkipTracksState = (0, can_skip_tracks_1.makeCanSkipTracksState)({
@@ -43,7 +44,8 @@ const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandle
  log_1.Log.trace(logLevel, 'Not emitting sample because seek is processing');
  }
  else {
- await callback(audioSample);
+ const trackDoneCallback = await callback(audioSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback !== null && trackDoneCallback !== void 0 ? trackDoneCallback : null;
  }
  }
  }
@@ -63,7 +65,8 @@ const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandle
  log_1.Log.trace(logLevel, 'Not emitting sample because seek is processing');
  }
  else {
- await callback(videoSample);
+ const trackDoneCallback = await callback(videoSample);
+ onTrackDoneCallback[trackId] = trackDoneCallback !== null && trackDoneCallback !== void 0 ? trackDoneCallback : null;
  }
  }
  }
@@ -101,6 +104,13 @@ const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandle
  videoSampleCallbacks,
  hasAudioTrackHandlers,
  hasVideoTrackHandlers,
+ callTracksDoneCallback: async () => {
+ for (const callback of Object.values(onTrackDoneCallback)) {
+ if (callback) {
+ await callback();
+ }
+ }
+ },
  };
  };
  exports.callbacksState = callbacksState;
package/dist/version.d.ts CHANGED
@@ -1 +1 @@
- export declare const VERSION = "4.0.305";
+ export declare const VERSION = "4.0.308";
package/dist/version.js CHANGED
@@ -2,4 +2,4 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.VERSION = void 0;
  // Automatically generated on publish
- exports.VERSION = '4.0.305';
+ exports.VERSION = '4.0.308';
@@ -1,7 +1,8 @@
  import type { MediaParserAudioTrack, MediaParserVideoTrack } from './get-tracks';
  import type { MediaParserContainer } from './options';
- export type MediaParserOnAudioSample = (sample: MediaParserAudioSample) => void | Promise<void>;
- export type MediaParserOnVideoSample = (sample: MediaParserVideoSample) => void | Promise<void>;
+ export type MediaParserOnAudioSample = (sample: MediaParserAudioSample) => void | Promise<void> | OnTrackDoneCallback | Promise<OnTrackDoneCallback>;
+ export type MediaParserOnVideoSample = (sample: MediaParserVideoSample) => void | Promise<void> | OnTrackDoneCallback | Promise<OnTrackDoneCallback>;
+ export type OnTrackDoneCallback = () => void | Promise<void>;
  export type MediaParserOnAudioTrackParams = {
  track: MediaParserAudioTrack;
  container: MediaParserContainer;
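A minimal sketch of the widened contract above (illustrative names only, not the package's documented public API): a per-sample callback typed as MediaParserOnVideoSample or MediaParserOnAudioSample may now return an OnTrackDoneCallback, which the parser stores per track and invokes after the last sample has been observed (see the parseLoop and callTracksDoneCallback hunks earlier in this diff).

    type OnTrackDoneCallback = () => void | Promise<void>;

    // Hypothetical sample callback: handles each sample, then is notified once the track is done.
    const onVideoSample = (sample: {timestamp: number}): OnTrackDoneCallback => {
      console.log('video sample at', sample.timestamp);
      return () => {
        console.log('no more samples will be emitted for this track');
      };
    };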
package/package.json CHANGED
@@ -3,15 +3,15 @@
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-parser"
  },
  "name": "@remotion/media-parser",
- "version": "4.0.305",
+ "version": "4.0.308",
  "main": "dist/index.js",
  "sideEffects": false,
  "devDependencies": {
  "@types/wicg-file-system-access": "2023.10.5",
  "eslint": "9.19.0",
  "@types/bun": "1.2.8",
- "@remotion/example-videos": "4.0.305",
- "@remotion/eslint-config-internal": "4.0.305"
+ "@remotion/example-videos": "4.0.308",
+ "@remotion/eslint-config-internal": "4.0.308"
  },
  "publishConfig": {
  "access": "public"
@@ -1,9 +0,0 @@
- import type { MediaParserVideoSample } from '../../webcodec-sample-types';
- export type QueuedVideoSample = Omit<MediaParserVideoSample, 'cts' | 'dts' | 'timestamp'>;
- export declare const queuedBFramesState: () => {
- addFrame: (frame: QueuedVideoSample, maxFramesInBuffer: number) => void;
- flush: () => void;
- getReleasedFrame: () => QueuedVideoSample | null;
- hasReleasedFrames: () => boolean;
- clear: () => void;
- };
@@ -1,47 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.queuedBFramesState = void 0;
- const queuedBFramesState = () => {
- const queuedFrames = [];
- const releasedFrames = [];
- const sortFrames = () => {
- queuedFrames.sort((a, b) => {
- if (!a.avc || !b.avc || a.avc.poc === null || b.avc.poc === null) {
- throw new Error('Invalid frame');
- }
- return a.avc.poc - b.avc.poc;
- });
- };
- const flush = () => {
- sortFrames();
- releasedFrames.push(...queuedFrames);
- queuedFrames.length = 0;
- };
- return {
- addFrame: (frame, maxFramesInBuffer) => {
- if (frame.type === 'key') {
- flush();
- }
- queuedFrames.push(frame);
- if (queuedFrames.length > maxFramesInBuffer) {
- sortFrames();
- releasedFrames.push(queuedFrames.shift());
- }
- },
- flush,
- getReleasedFrame: () => {
- if (releasedFrames.length === 0) {
- return null;
- }
- return releasedFrames.shift();
- },
- hasReleasedFrames: () => {
- return releasedFrames.length > 0;
- },
- clear: () => {
- releasedFrames.length = 0;
- queuedFrames.length = 0;
- },
- };
- };
- exports.queuedBFramesState = queuedBFramesState;