@remotion/media-parser 4.0.288 → 4.0.290
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/aac/get-seeking-byte.d.ts +6 -0
- package/dist/containers/aac/get-seeking-byte.js +30 -0
- package/dist/containers/aac/parse-aac.js +23 -18
- package/dist/containers/aac/seeking-hints.d.ts +13 -0
- package/dist/containers/aac/seeking-hints.js +14 -0
- package/dist/containers/flac/get-channel-count.d.ts +1 -1
- package/dist/containers/flac/get-seeking-byte.d.ts +1 -2
- package/dist/containers/flac/get-seeking-byte.js +6 -2
- package/dist/containers/flac/parse-flac-frame.js +18 -17
- package/dist/containers/flac/parse-flac.js +5 -25
- package/dist/containers/flac/seeking-hints.d.ts +4 -1
- package/dist/containers/flac/seeking-hints.js +2 -1
- package/dist/containers/iso-base-media/get-children.d.ts +2 -1
- package/dist/containers/iso-base-media/get-children.js +2 -1
- package/dist/containers/iso-base-media/get-mfra-seeking-box.js +1 -0
- package/dist/containers/iso-base-media/get-moov-atom.js +2 -1
- package/dist/containers/iso-base-media/mdat/mdat.js +26 -33
- package/dist/containers/iso-base-media/moov/moov.d.ts +2 -1
- package/dist/containers/iso-base-media/moov/moov.js +2 -1
- package/dist/containers/iso-base-media/parse-boxes.js +1 -0
- package/dist/containers/iso-base-media/process-box.d.ts +2 -1
- package/dist/containers/iso-base-media/process-box.js +10 -4
- package/dist/containers/iso-base-media/seeking-hints.d.ts +1 -1
- package/dist/containers/iso-base-media/stsd/mebx.d.ts +2 -1
- package/dist/containers/iso-base-media/stsd/mebx.js +2 -1
- package/dist/containers/iso-base-media/stsd/samples.d.ts +4 -2
- package/dist/containers/iso-base-media/stsd/samples.js +7 -2
- package/dist/containers/iso-base-media/stsd/stsd.d.ts +2 -1
- package/dist/containers/iso-base-media/stsd/stsd.js +2 -1
- package/dist/containers/iso-base-media/trak/trak.d.ts +2 -1
- package/dist/containers/iso-base-media/trak/trak.js +2 -1
- package/dist/containers/mp3/audio-sample-from-cbr.d.ts +11 -0
- package/dist/containers/mp3/audio-sample-from-cbr.js +35 -0
- package/dist/containers/mp3/get-duration.d.ts +5 -0
- package/dist/containers/mp3/get-duration.js +33 -6
- package/dist/containers/mp3/get-seeking-byte.d.ts +6 -0
- package/dist/containers/mp3/get-seeking-byte.js +49 -0
- package/dist/containers/mp3/parse-mp3.js +9 -0
- package/dist/containers/mp3/parse-mpeg-header.js +74 -263
- package/dist/containers/mp3/parse-packet-header.d.ts +30 -0
- package/dist/containers/mp3/parse-packet-header.js +258 -0
- package/dist/containers/mp3/parse-xing.d.ts +19 -0
- package/dist/containers/mp3/parse-xing.js +120 -0
- package/dist/containers/mp3/seek/audio-sample-from-cbr.d.ts +16 -0
- package/dist/containers/mp3/seek/audio-sample-from-cbr.js +35 -0
- package/dist/containers/mp3/seek/audio-sample-from-vbr.d.ts +8 -0
- package/dist/containers/mp3/seek/audio-sample-from-vbr.js +47 -0
- package/dist/containers/mp3/seek/get-approximate-byte-from-bitrate.d.ts +9 -0
- package/dist/containers/mp3/seek/get-approximate-byte-from-bitrate.js +28 -0
- package/dist/containers/mp3/seek/get-byte-from-observed-samples.d.ts +6 -0
- package/dist/containers/mp3/seek/get-byte-from-observed-samples.js +27 -0
- package/dist/containers/mp3/seek/get-seek-point-from-xing.d.ts +7 -0
- package/dist/containers/mp3/seek/get-seek-point-from-xing.js +29 -0
- package/dist/containers/mp3/seek/wait-until-syncword.d.ts +4 -0
- package/dist/containers/mp3/seek/wait-until-syncword.js +25 -0
- package/dist/containers/mp3/seeking-hints.d.ts +24 -0
- package/dist/containers/mp3/seeking-hints.js +21 -0
- package/dist/containers/riff/expect-riff-box.d.ts +6 -1
- package/dist/containers/riff/expect-riff-box.js +37 -27
- package/dist/containers/riff/get-seeking-byte.d.ts +8 -0
- package/dist/containers/riff/get-seeking-byte.js +56 -0
- package/dist/containers/riff/has-index.d.ts +2 -0
- package/dist/containers/riff/has-index.js +8 -0
- package/dist/containers/riff/parse-avih.js +3 -0
- package/dist/containers/riff/parse-idx1.d.ts +6 -0
- package/dist/containers/riff/parse-idx1.js +47 -0
- package/dist/containers/riff/parse-list-box.d.ts +4 -2
- package/dist/containers/riff/parse-list-box.js +8 -3
- package/dist/containers/riff/parse-movi.js +35 -40
- package/dist/containers/riff/parse-riff-body.js +5 -1
- package/dist/containers/riff/parse-riff-box.d.ts +4 -2
- package/dist/containers/riff/parse-riff-box.js +10 -3
- package/dist/containers/riff/riff-box.d.ts +14 -1
- package/dist/containers/riff/seek/fetch-idx1.d.ts +15 -0
- package/dist/containers/riff/seek/fetch-idx1.js +38 -0
- package/dist/containers/riff/seeking-hints.d.ts +23 -0
- package/dist/containers/riff/seeking-hints.js +36 -0
- package/dist/containers/transport-stream/handle-aac-packet.js +4 -8
- package/dist/containers/transport-stream/handle-avc-packet.js +4 -8
- package/dist/containers/wav/get-duration-from-wav.d.ts +0 -1
- package/dist/containers/wav/get-duration-from-wav.js +1 -10
- package/dist/containers/wav/parse-media-section.js +14 -18
- package/dist/containers/webm/parse-ebml.js +3 -16
- package/dist/containers/webm/seek/seeking-hints.js +1 -1
- package/dist/emit-available-info.js +8 -8
- package/dist/esm/index.mjs +1479 -383
- package/dist/esm/worker-server-entry.mjs +1475 -379
- package/dist/esm/worker-web-entry.mjs +1475 -379
- package/dist/find-last-keyframe.d.ts +5 -0
- package/dist/find-last-keyframe.js +18 -0
- package/dist/get-seeking-byte.d.ts +3 -1
- package/dist/get-seeking-byte.js +45 -7
- package/dist/get-seeking-hints.d.ts +12 -1
- package/dist/get-seeking-hints.js +40 -9
- package/dist/index.d.ts +56 -8
- package/dist/internal-parse-media.js +6 -0
- package/dist/parse-loop.js +15 -0
- package/dist/seeking-hints.d.ts +5 -1
- package/dist/set-seeking-hints.js +28 -8
- package/dist/state/aac-state.d.ts +6 -0
- package/dist/state/aac-state.js +7 -2
- package/dist/state/flac-state.d.ts +6 -0
- package/dist/state/flac-state.js +3 -0
- package/dist/state/keyframes.d.ts +1 -2
- package/dist/state/keyframes.js +2 -2
- package/dist/state/matroska/lazy-cues-fetch.js +13 -1
- package/dist/state/mp3.d.ts +16 -5
- package/dist/state/mp3.js +7 -5
- package/dist/state/parser-state.d.ts +52 -6
- package/dist/state/parser-state.js +6 -6
- package/dist/state/riff/lazy-idx1-fetch.d.ts +30 -0
- package/dist/state/riff/lazy-idx1-fetch.js +63 -0
- package/dist/state/riff/riff-keyframes.d.ts +10 -0
- package/dist/state/riff/riff-keyframes.js +26 -0
- package/dist/state/riff/sample-counter.d.ts +12 -0
- package/dist/state/riff/sample-counter.js +52 -0
- package/dist/state/riff.d.ts +41 -1
- package/dist/state/riff.js +12 -1
- package/dist/state/sample-callbacks.d.ts +3 -4
- package/dist/state/sample-callbacks.js +3 -16
- package/dist/state/samples-observed/slow-duration-fps.d.ts +3 -1
- package/dist/state/samples-observed/slow-duration-fps.js +7 -0
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/work-on-seek-request.d.ts +10 -0
- package/dist/work-on-seek-request.js +20 -2
- package/package.json +3 -3
|
@@ -4909,24 +4909,45 @@ var getSamplesPerMpegFrame = ({
|
|
|
4909
4909
|
};
|
|
4910
4910
|
|
|
4911
4911
|
// src/containers/mp3/get-duration.ts
|
|
4912
|
+
var getDurationFromMp3Xing = ({
|
|
4913
|
+
xingData,
|
|
4914
|
+
samplesPerFrame
|
|
4915
|
+
}) => {
|
|
4916
|
+
const xingFrames = xingData.numberOfFrames;
|
|
4917
|
+
if (!xingFrames) {
|
|
4918
|
+
throw new Error("Cannot get duration of VBR MP3 file - no frames");
|
|
4919
|
+
}
|
|
4920
|
+
const { sampleRate } = xingData;
|
|
4921
|
+
if (!sampleRate) {
|
|
4922
|
+
throw new Error("Cannot get duration of VBR MP3 file - no sample rate");
|
|
4923
|
+
}
|
|
4924
|
+
const xingSamples = xingFrames * samplesPerFrame;
|
|
4925
|
+
return xingSamples / sampleRate;
|
|
4926
|
+
};
|
|
4912
4927
|
var getDurationFromMp3 = (state) => {
|
|
4913
|
-
const mp3Info = state.
|
|
4914
|
-
const
|
|
4915
|
-
if (!mp3Info || !
|
|
4928
|
+
const mp3Info = state.mp3.getMp3Info();
|
|
4929
|
+
const mp3BitrateInfo = state.mp3.getMp3BitrateInfo();
|
|
4930
|
+
if (!mp3Info || !mp3BitrateInfo) {
|
|
4916
4931
|
return null;
|
|
4917
4932
|
}
|
|
4918
4933
|
const samplesPerFrame = getSamplesPerMpegFrame({
|
|
4919
4934
|
layer: mp3Info.layer,
|
|
4920
4935
|
mpegVersion: mp3Info.mpegVersion
|
|
4921
4936
|
});
|
|
4937
|
+
if (mp3BitrateInfo.type === "variable") {
|
|
4938
|
+
return getDurationFromMp3Xing({
|
|
4939
|
+
xingData: mp3BitrateInfo.xingData,
|
|
4940
|
+
samplesPerFrame
|
|
4941
|
+
});
|
|
4942
|
+
}
|
|
4922
4943
|
const frameLengthInBytes = getMpegFrameLength({
|
|
4923
|
-
bitrateKbit:
|
|
4944
|
+
bitrateKbit: mp3BitrateInfo.bitrateInKbit,
|
|
4924
4945
|
padding: false,
|
|
4925
4946
|
samplesPerFrame,
|
|
4926
4947
|
samplingFrequency: mp3Info.sampleRate,
|
|
4927
4948
|
layer: mp3Info.layer
|
|
4928
4949
|
});
|
|
4929
|
-
const frames = Math.floor((state.contentLength -
|
|
4950
|
+
const frames = Math.floor((state.contentLength - state.mediaSection.getMediaSectionAssertOnlyOne().start) / frameLengthInBytes);
|
|
4930
4951
|
const samples = frames * samplesPerFrame;
|
|
4931
4952
|
const durationInSeconds = samples / mp3Info.sampleRate;
|
|
4932
4953
|
return durationInSeconds;
|
|
@@ -5465,6 +5486,60 @@ var hasSampleRate = (state) => {
|
|
|
5465
5486
|
return state.callbacks.tracks.hasAllTracks();
|
|
5466
5487
|
};
|
|
5467
5488
|
|
|
5489
|
+
// src/containers/aac/get-seeking-byte.ts
|
|
5490
|
+
var getSeekingByteForAac = ({
|
|
5491
|
+
time,
|
|
5492
|
+
seekingHints
|
|
5493
|
+
}) => {
|
|
5494
|
+
let bestAudioSample;
|
|
5495
|
+
for (const hint of seekingHints.audioSampleMap) {
|
|
5496
|
+
if (hint.timeInSeconds > time) {
|
|
5497
|
+
continue;
|
|
5498
|
+
}
|
|
5499
|
+
if (hint.timeInSeconds + hint.durationInSeconds < time && !seekingHints.lastSampleObserved) {
|
|
5500
|
+
continue;
|
|
5501
|
+
}
|
|
5502
|
+
if (!bestAudioSample) {
|
|
5503
|
+
bestAudioSample = hint;
|
|
5504
|
+
continue;
|
|
5505
|
+
}
|
|
5506
|
+
if (bestAudioSample.timeInSeconds < hint.timeInSeconds) {
|
|
5507
|
+
bestAudioSample = hint;
|
|
5508
|
+
}
|
|
5509
|
+
}
|
|
5510
|
+
if (bestAudioSample) {
|
|
5511
|
+
return { type: "do-seek", byte: bestAudioSample.offset };
|
|
5512
|
+
}
|
|
5513
|
+
return { type: "valid-but-must-wait" };
|
|
5514
|
+
};
|
|
5515
|
+
|
|
5516
|
+
// src/containers/flac/get-seeking-byte.ts
|
|
5517
|
+
var getSeekingByteForFlac = ({
|
|
5518
|
+
time,
|
|
5519
|
+
seekingHints
|
|
5520
|
+
}) => {
|
|
5521
|
+
let bestAudioSample;
|
|
5522
|
+
for (const hint of seekingHints.audioSampleMap) {
|
|
5523
|
+
if (hint.timeInSeconds > time) {
|
|
5524
|
+
continue;
|
|
5525
|
+
}
|
|
5526
|
+
if (hint.timeInSeconds + hint.durationInSeconds < time && !seekingHints.lastSampleObserved) {
|
|
5527
|
+
continue;
|
|
5528
|
+
}
|
|
5529
|
+
if (!bestAudioSample) {
|
|
5530
|
+
bestAudioSample = hint;
|
|
5531
|
+
continue;
|
|
5532
|
+
}
|
|
5533
|
+
if (bestAudioSample.timeInSeconds < hint.timeInSeconds) {
|
|
5534
|
+
bestAudioSample = hint;
|
|
5535
|
+
}
|
|
5536
|
+
}
|
|
5537
|
+
if (bestAudioSample) {
|
|
5538
|
+
return bestAudioSample.offset;
|
|
5539
|
+
}
|
|
5540
|
+
return null;
|
|
5541
|
+
};
|
|
5542
|
+
|
|
5468
5543
|
// src/state/video-section.ts
|
|
5469
5544
|
var isByteInMediaSection = ({
|
|
5470
5545
|
position,
|
|
@@ -5734,6 +5809,312 @@ var getSeekingByteFromIsoBaseMedia = async ({
|
|
|
5734
5809
|
};
|
|
5735
5810
|
};
|
|
5736
5811
|
|
|
5812
|
+
// src/containers/mp3/seek/get-approximate-byte-from-bitrate.ts
|
|
5813
|
+
var getApproximateByteFromBitrate = ({
|
|
5814
|
+
mp3BitrateInfo,
|
|
5815
|
+
timeInSeconds,
|
|
5816
|
+
mp3Info,
|
|
5817
|
+
mediaSection,
|
|
5818
|
+
contentLength
|
|
5819
|
+
}) => {
|
|
5820
|
+
if (mp3BitrateInfo.type === "variable") {
|
|
5821
|
+
return null;
|
|
5822
|
+
}
|
|
5823
|
+
const samplesPerFrame = getSamplesPerMpegFrame({
|
|
5824
|
+
layer: mp3Info.layer,
|
|
5825
|
+
mpegVersion: mp3Info.mpegVersion
|
|
5826
|
+
});
|
|
5827
|
+
const frameLengthInBytes = getMpegFrameLength({
|
|
5828
|
+
bitrateKbit: mp3BitrateInfo.bitrateInKbit,
|
|
5829
|
+
padding: false,
|
|
5830
|
+
samplesPerFrame,
|
|
5831
|
+
samplingFrequency: mp3Info.sampleRate,
|
|
5832
|
+
layer: mp3Info.layer
|
|
5833
|
+
});
|
|
5834
|
+
const frameIndexUnclamped = Math.floor(timeInSeconds * mp3Info.sampleRate / samplesPerFrame);
|
|
5835
|
+
const frames = Math.floor((contentLength - mediaSection.start) / frameLengthInBytes);
|
|
5836
|
+
const frameIndex = Math.min(frames - 1, frameIndexUnclamped);
|
|
5837
|
+
const byteRelativeToMediaSection = frameIndex * frameLengthInBytes;
|
|
5838
|
+
const byteBeforeFrame = byteRelativeToMediaSection + mediaSection.start;
|
|
5839
|
+
return byteBeforeFrame;
|
|
5840
|
+
};
|
|
5841
|
+
|
|
5842
|
+
// src/containers/mp3/seek/get-byte-from-observed-samples.ts
|
|
5843
|
+
var getByteFromObservedSamples = ({
|
|
5844
|
+
info,
|
|
5845
|
+
timeInSeconds
|
|
5846
|
+
}) => {
|
|
5847
|
+
let bestAudioSample;
|
|
5848
|
+
for (const hint of info.audioSampleMap) {
|
|
5849
|
+
if (hint.timeInSeconds > timeInSeconds) {
|
|
5850
|
+
continue;
|
|
5851
|
+
}
|
|
5852
|
+
if (hint.timeInSeconds + hint.durationInSeconds < timeInSeconds && !info.lastSampleObserved) {
|
|
5853
|
+
continue;
|
|
5854
|
+
}
|
|
5855
|
+
if (!bestAudioSample) {
|
|
5856
|
+
bestAudioSample = hint;
|
|
5857
|
+
continue;
|
|
5858
|
+
}
|
|
5859
|
+
if (bestAudioSample.timeInSeconds < hint.timeInSeconds) {
|
|
5860
|
+
bestAudioSample = hint;
|
|
5861
|
+
}
|
|
5862
|
+
}
|
|
5863
|
+
return bestAudioSample;
|
|
5864
|
+
};
|
|
5865
|
+
|
|
5866
|
+
// src/containers/mp3/parse-xing.ts
|
|
5867
|
+
var SAMPLE_RATES = [44100, 48000, 32000, 99999];
|
|
5868
|
+
var FRAMES_FLAG = 1;
|
|
5869
|
+
var BYTES_FLAG = 2;
|
|
5870
|
+
var TOC_FLAG = 4;
|
|
5871
|
+
var VBR_SCALE_FLAG = 8;
|
|
5872
|
+
var extractI4 = (data, offset) => {
|
|
5873
|
+
let x = 0;
|
|
5874
|
+
x = data[offset];
|
|
5875
|
+
x <<= 8;
|
|
5876
|
+
x |= data[offset + 1];
|
|
5877
|
+
x <<= 8;
|
|
5878
|
+
x |= data[offset + 2];
|
|
5879
|
+
x <<= 8;
|
|
5880
|
+
x |= data[offset + 3];
|
|
5881
|
+
return x;
|
|
5882
|
+
};
|
|
5883
|
+
var parseXing = (data) => {
|
|
5884
|
+
const h_id = data[1] >> 3 & 1;
|
|
5885
|
+
const h_sr_index = data[2] >> 2 & 3;
|
|
5886
|
+
const h_mode = data[3] >> 6 & 3;
|
|
5887
|
+
let xingOffset = 0;
|
|
5888
|
+
if (h_id) {
|
|
5889
|
+
if (h_mode !== 3) {
|
|
5890
|
+
xingOffset += 32 + 4;
|
|
5891
|
+
} else {
|
|
5892
|
+
xingOffset += 17 + 4;
|
|
5893
|
+
}
|
|
5894
|
+
} else if (h_mode !== 3) {
|
|
5895
|
+
xingOffset += 17 + 4;
|
|
5896
|
+
} else {
|
|
5897
|
+
xingOffset += 9 + 4;
|
|
5898
|
+
}
|
|
5899
|
+
const expectXing = new TextDecoder("utf8").decode(data.slice(xingOffset, xingOffset + 4));
|
|
5900
|
+
if (expectXing !== "Xing") {
|
|
5901
|
+
throw new Error("Invalid Xing header");
|
|
5902
|
+
}
|
|
5903
|
+
let sampleRate = SAMPLE_RATES[h_sr_index];
|
|
5904
|
+
if (h_id === 0) {
|
|
5905
|
+
sampleRate >>= 1;
|
|
5906
|
+
}
|
|
5907
|
+
let offset = xingOffset + 4;
|
|
5908
|
+
const flags = extractI4(data, offset);
|
|
5909
|
+
offset += 4;
|
|
5910
|
+
let numberOfFrames;
|
|
5911
|
+
let fileSize;
|
|
5912
|
+
let tableOfContents;
|
|
5913
|
+
let vbrScale;
|
|
5914
|
+
if (flags & FRAMES_FLAG) {
|
|
5915
|
+
numberOfFrames = extractI4(data, offset);
|
|
5916
|
+
offset += 4;
|
|
5917
|
+
}
|
|
5918
|
+
if (flags & BYTES_FLAG) {
|
|
5919
|
+
fileSize = extractI4(data, offset);
|
|
5920
|
+
offset += 4;
|
|
5921
|
+
}
|
|
5922
|
+
if (flags & TOC_FLAG) {
|
|
5923
|
+
tableOfContents = data.slice(offset, offset + 100);
|
|
5924
|
+
offset += 100;
|
|
5925
|
+
}
|
|
5926
|
+
if (flags & VBR_SCALE_FLAG) {
|
|
5927
|
+
vbrScale = extractI4(data, offset);
|
|
5928
|
+
offset += 4;
|
|
5929
|
+
}
|
|
5930
|
+
if (offset !== data.length) {
|
|
5931
|
+
throw new Error("xing header was parsed wrong: " + JSON.stringify(data));
|
|
5932
|
+
}
|
|
5933
|
+
return {
|
|
5934
|
+
sampleRate,
|
|
5935
|
+
numberOfFrames: numberOfFrames ?? null,
|
|
5936
|
+
fileSize: fileSize ?? null,
|
|
5937
|
+
tableOfContents: tableOfContents ? Array.from(tableOfContents.slice(0, 100)) : null,
|
|
5938
|
+
vbrScale: vbrScale ?? null
|
|
5939
|
+
};
|
|
5940
|
+
};
|
|
5941
|
+
var getSeekPointInBytes = ({
|
|
5942
|
+
fileSize,
|
|
5943
|
+
percentBetween0And100,
|
|
5944
|
+
tableOfContents
|
|
5945
|
+
}) => {
|
|
5946
|
+
let index = Math.floor(percentBetween0And100);
|
|
5947
|
+
if (index > 99) {
|
|
5948
|
+
index = 99;
|
|
5949
|
+
}
|
|
5950
|
+
const fa = tableOfContents[index];
|
|
5951
|
+
let fb;
|
|
5952
|
+
if (index < 99) {
|
|
5953
|
+
fb = tableOfContents[index + 1];
|
|
5954
|
+
} else {
|
|
5955
|
+
fb = 256;
|
|
5956
|
+
}
|
|
5957
|
+
const fx = fa + (fb - fa) * (percentBetween0And100 - index);
|
|
5958
|
+
const seekPoint = 1 / 256 * fx * fileSize;
|
|
5959
|
+
return Math.floor(seekPoint);
|
|
5960
|
+
};
|
|
5961
|
+
var getTimeFromPosition = ({
|
|
5962
|
+
position,
|
|
5963
|
+
fileSize,
|
|
5964
|
+
tableOfContents,
|
|
5965
|
+
durationInSeconds
|
|
5966
|
+
}) => {
|
|
5967
|
+
const positionNormalized = position / fileSize * 256;
|
|
5968
|
+
let index = 0;
|
|
5969
|
+
while (index < 99 && tableOfContents[index + 1] <= positionNormalized) {
|
|
5970
|
+
index++;
|
|
5971
|
+
}
|
|
5972
|
+
const fa = tableOfContents[index];
|
|
5973
|
+
const fb = index < 99 ? tableOfContents[index + 1] : 256;
|
|
5974
|
+
const percentWithinSegment = (positionNormalized - fa) / (fb - fa);
|
|
5975
|
+
const percentBetween0And100 = index + percentWithinSegment;
|
|
5976
|
+
return percentBetween0And100 / 100 * durationInSeconds;
|
|
5977
|
+
};
|
|
5978
|
+
|
|
5979
|
+
// src/containers/mp3/seek/get-seek-point-from-xing.ts
|
|
5980
|
+
var getSeekPointFromXing = ({
|
|
5981
|
+
timeInSeconds,
|
|
5982
|
+
xingData,
|
|
5983
|
+
mp3Info
|
|
5984
|
+
}) => {
|
|
5985
|
+
const samplesPerFrame = getSamplesPerMpegFrame({
|
|
5986
|
+
layer: mp3Info.layer,
|
|
5987
|
+
mpegVersion: mp3Info.mpegVersion
|
|
5988
|
+
});
|
|
5989
|
+
const duration2 = getDurationFromMp3Xing({
|
|
5990
|
+
xingData,
|
|
5991
|
+
samplesPerFrame
|
|
5992
|
+
});
|
|
5993
|
+
const totalSamples = timeInSeconds * xingData.sampleRate;
|
|
5994
|
+
const oneFrameSubtracted = totalSamples - samplesPerFrame;
|
|
5995
|
+
const timeToTarget = Math.max(0, oneFrameSubtracted / xingData.sampleRate);
|
|
5996
|
+
if (!xingData.fileSize || !xingData.tableOfContents) {
|
|
5997
|
+
throw new Error("Cannot seek of VBR MP3 file");
|
|
5998
|
+
}
|
|
5999
|
+
return getSeekPointInBytes({
|
|
6000
|
+
fileSize: xingData.fileSize,
|
|
6001
|
+
percentBetween0And100: timeToTarget / duration2 * 100,
|
|
6002
|
+
tableOfContents: xingData.tableOfContents
|
|
6003
|
+
});
|
|
6004
|
+
};
|
|
6005
|
+
|
|
6006
|
+
// src/containers/mp3/get-seeking-byte.ts
|
|
6007
|
+
var getSeekingByteForMp3 = ({
|
|
6008
|
+
time,
|
|
6009
|
+
info
|
|
6010
|
+
}) => {
|
|
6011
|
+
if (info.mp3BitrateInfo === null || info.mp3Info === null || info.mediaSection === null) {
|
|
6012
|
+
return {
|
|
6013
|
+
type: "valid-but-must-wait"
|
|
6014
|
+
};
|
|
6015
|
+
}
|
|
6016
|
+
const approximateByte = getApproximateByteFromBitrate({
|
|
6017
|
+
mp3BitrateInfo: info.mp3BitrateInfo,
|
|
6018
|
+
timeInSeconds: time,
|
|
6019
|
+
mp3Info: info.mp3Info,
|
|
6020
|
+
mediaSection: info.mediaSection,
|
|
6021
|
+
contentLength: info.contentLength
|
|
6022
|
+
});
|
|
6023
|
+
const bestAudioSample = getByteFromObservedSamples({
|
|
6024
|
+
info,
|
|
6025
|
+
timeInSeconds: time
|
|
6026
|
+
});
|
|
6027
|
+
const xingSeekPoint = info.mp3BitrateInfo.type === "variable" ? getSeekPointFromXing({
|
|
6028
|
+
mp3Info: info.mp3Info,
|
|
6029
|
+
timeInSeconds: time,
|
|
6030
|
+
xingData: info.mp3BitrateInfo.xingData
|
|
6031
|
+
}) : null;
|
|
6032
|
+
const candidates = [
|
|
6033
|
+
approximateByte,
|
|
6034
|
+
bestAudioSample?.offset ?? null,
|
|
6035
|
+
xingSeekPoint
|
|
6036
|
+
].filter((b) => b !== null);
|
|
6037
|
+
if (candidates.length === 0) {
|
|
6038
|
+
return {
|
|
6039
|
+
type: "valid-but-must-wait"
|
|
6040
|
+
};
|
|
6041
|
+
}
|
|
6042
|
+
return {
|
|
6043
|
+
type: "do-seek",
|
|
6044
|
+
byte: Math.max(...candidates)
|
|
6045
|
+
};
|
|
6046
|
+
};
|
|
6047
|
+
|
|
6048
|
+
// src/find-last-keyframe.ts
|
|
6049
|
+
function findLastKeyframe({
|
|
6050
|
+
keyframes,
|
|
6051
|
+
timeInSeconds
|
|
6052
|
+
}) {
|
|
6053
|
+
let bestKeyframe = null;
|
|
6054
|
+
for (const keyframe of keyframes) {
|
|
6055
|
+
if (keyframe.presentationTimeInSeconds > timeInSeconds && keyframe.decodingTimeInSeconds > timeInSeconds) {
|
|
6056
|
+
break;
|
|
6057
|
+
}
|
|
6058
|
+
if (bestKeyframe === null || keyframe.presentationTimeInSeconds > bestKeyframe.presentationTimeInSeconds) {
|
|
6059
|
+
bestKeyframe = keyframe;
|
|
6060
|
+
}
|
|
6061
|
+
}
|
|
6062
|
+
return bestKeyframe;
|
|
6063
|
+
}
|
|
6064
|
+
|
|
6065
|
+
// src/containers/riff/get-seeking-byte.ts
|
|
6066
|
+
var getSeekingByteForRiff = async ({
|
|
6067
|
+
info,
|
|
6068
|
+
time,
|
|
6069
|
+
riffState
|
|
6070
|
+
}) => {
|
|
6071
|
+
const idx1Entries = await (info.hasIndex ? riffState.lazyIdx1.waitForLoaded() : Promise.resolve(null));
|
|
6072
|
+
if (idx1Entries === null) {
|
|
6073
|
+
const lastKeyframe = findLastKeyframe({
|
|
6074
|
+
keyframes: info.observedKeyframes,
|
|
6075
|
+
timeInSeconds: time
|
|
6076
|
+
});
|
|
6077
|
+
if (lastKeyframe === null) {
|
|
6078
|
+
return {
|
|
6079
|
+
type: "valid-but-must-wait"
|
|
6080
|
+
};
|
|
6081
|
+
}
|
|
6082
|
+
riffState.sampleCounter.setSamplesFromSeek(lastKeyframe.sampleCounts);
|
|
6083
|
+
return {
|
|
6084
|
+
type: "do-seek",
|
|
6085
|
+
byte: lastKeyframe.positionInBytes
|
|
6086
|
+
};
|
|
6087
|
+
}
|
|
6088
|
+
if (idx1Entries.videoTrackIndex === null) {
|
|
6089
|
+
throw new Error("videoTrackIndex is null");
|
|
6090
|
+
}
|
|
6091
|
+
if (info.samplesPerSecond === null) {
|
|
6092
|
+
throw new Error("samplesPerSecond is null");
|
|
6093
|
+
}
|
|
6094
|
+
const index = Math.floor(time * info.samplesPerSecond);
|
|
6095
|
+
let bestEntry = null;
|
|
6096
|
+
for (const entry of idx1Entries.entries) {
|
|
6097
|
+
if (entry.sampleCounts[idx1Entries.videoTrackIndex] > index) {
|
|
6098
|
+
continue;
|
|
6099
|
+
}
|
|
6100
|
+
if (bestEntry && entry.sampleCounts[idx1Entries.videoTrackIndex] < bestEntry.sampleCounts[idx1Entries.videoTrackIndex]) {
|
|
6101
|
+
continue;
|
|
6102
|
+
}
|
|
6103
|
+
bestEntry = entry;
|
|
6104
|
+
}
|
|
6105
|
+
if (!bestEntry) {
|
|
6106
|
+
throw new Error("No best entry");
|
|
6107
|
+
}
|
|
6108
|
+
if (info.moviOffset === null) {
|
|
6109
|
+
throw new Error("moviOffset is null");
|
|
6110
|
+
}
|
|
6111
|
+
riffState.sampleCounter.setSamplesFromSeek(bestEntry.sampleCounts);
|
|
6112
|
+
return {
|
|
6113
|
+
type: "do-seek",
|
|
6114
|
+
byte: bestEntry.offset + info.moviOffset - 4
|
|
6115
|
+
};
|
|
6116
|
+
};
|
|
6117
|
+
|
|
5737
6118
|
// src/containers/wav/get-seeking-byte.ts
|
|
5738
6119
|
var WAVE_SAMPLES_PER_SECOND = 25;
|
|
5739
6120
|
var getSeekingByteFromWav = ({
|
|
@@ -5880,22 +6261,6 @@ var convertAudioOrVideoSampleToWebCodecsTimestamps = ({
|
|
|
5880
6261
|
};
|
|
5881
6262
|
};
|
|
5882
6263
|
|
|
5883
|
-
// src/emit-audio-sample.ts
|
|
5884
|
-
var emitAudioSample = async ({
|
|
5885
|
-
trackId,
|
|
5886
|
-
audioSample,
|
|
5887
|
-
callbacks
|
|
5888
|
-
}) => {
|
|
5889
|
-
await callbacks.onAudioSample(trackId, audioSample);
|
|
5890
|
-
};
|
|
5891
|
-
var emitVideoSample = async ({
|
|
5892
|
-
trackId,
|
|
5893
|
-
videoSample,
|
|
5894
|
-
callbacks
|
|
5895
|
-
}) => {
|
|
5896
|
-
await callbacks.onVideoSample(trackId, videoSample);
|
|
5897
|
-
};
|
|
5898
|
-
|
|
5899
6264
|
// src/register-track.ts
|
|
5900
6265
|
var registerVideoTrack = async ({
|
|
5901
6266
|
track,
|
|
@@ -6320,14 +6685,11 @@ var handleAvcPacket = async ({
|
|
|
6320
6685
|
if (type === "key") {
|
|
6321
6686
|
transportStream.observedPesHeaders.markPtsAsKeyframe(streamBuffer.pesHeader.pts);
|
|
6322
6687
|
}
|
|
6323
|
-
|
|
6324
|
-
|
|
6325
|
-
|
|
6326
|
-
sample,
|
|
6327
|
-
timescale: MPEG_TIMESCALE
|
|
6328
|
-
}),
|
|
6329
|
-
callbacks: sampleCallbacks
|
|
6688
|
+
const videoSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
6689
|
+
sample,
|
|
6690
|
+
timescale: MPEG_TIMESCALE
|
|
6330
6691
|
});
|
|
6692
|
+
await sampleCallbacks.onVideoSample(programId, videoSample);
|
|
6331
6693
|
transportStream.lastEmittedSample.setLastEmittedSample(sample);
|
|
6332
6694
|
};
|
|
6333
6695
|
|
|
@@ -6381,7 +6743,8 @@ var getSeekingByte = ({
|
|
|
6381
6743
|
webmState,
|
|
6382
6744
|
mediaSection,
|
|
6383
6745
|
mp4HeaderSegment,
|
|
6384
|
-
structure
|
|
6746
|
+
structure,
|
|
6747
|
+
riffState
|
|
6385
6748
|
}) => {
|
|
6386
6749
|
if (info.type === "iso-base-media-seeking-hints") {
|
|
6387
6750
|
return getSeekingByteFromIsoBaseMedia({
|
|
@@ -6409,6 +6772,21 @@ var getSeekingByte = ({
|
|
|
6409
6772
|
mediaSection
|
|
6410
6773
|
});
|
|
6411
6774
|
}
|
|
6775
|
+
if (info.type === "flac-seeking-hints") {
|
|
6776
|
+
const byte = getSeekingByteForFlac({
|
|
6777
|
+
seekingHints: info,
|
|
6778
|
+
time
|
|
6779
|
+
});
|
|
6780
|
+
if (byte) {
|
|
6781
|
+
return Promise.resolve({
|
|
6782
|
+
type: "do-seek",
|
|
6783
|
+
byte
|
|
6784
|
+
});
|
|
6785
|
+
}
|
|
6786
|
+
return Promise.resolve({
|
|
6787
|
+
type: "valid-but-must-wait"
|
|
6788
|
+
});
|
|
6789
|
+
}
|
|
6412
6790
|
if (info.type === "transport-stream-seeking-hints") {
|
|
6413
6791
|
const lastKeyframeBeforeTimeInSeconds = getLastKeyFrameBeforeTimeInSeconds({
|
|
6414
6792
|
observedPesHeaders: info.observedPesHeaders,
|
|
@@ -6422,9 +6800,63 @@ var getSeekingByte = ({
|
|
|
6422
6800
|
byte
|
|
6423
6801
|
});
|
|
6424
6802
|
}
|
|
6803
|
+
if (info.type === "riff-seeking-hints") {
|
|
6804
|
+
return getSeekingByteForRiff({
|
|
6805
|
+
info,
|
|
6806
|
+
time,
|
|
6807
|
+
riffState
|
|
6808
|
+
});
|
|
6809
|
+
}
|
|
6810
|
+
if (info.type === "mp3-seeking-hints") {
|
|
6811
|
+
return Promise.resolve(getSeekingByteForMp3({
|
|
6812
|
+
info,
|
|
6813
|
+
time
|
|
6814
|
+
}));
|
|
6815
|
+
}
|
|
6816
|
+
if (info.type === "aac-seeking-hints") {
|
|
6817
|
+
return Promise.resolve(getSeekingByteForAac({
|
|
6818
|
+
time,
|
|
6819
|
+
seekingHints: info
|
|
6820
|
+
}));
|
|
6821
|
+
}
|
|
6425
6822
|
throw new Error(`Unknown seeking info type: ${info}`);
|
|
6426
6823
|
};
|
|
6427
6824
|
|
|
6825
|
+
// src/containers/aac/seeking-hints.ts
|
|
6826
|
+
var getSeekingHintsForAac = ({
|
|
6827
|
+
aacState,
|
|
6828
|
+
samplesObserved
|
|
6829
|
+
}) => {
|
|
6830
|
+
return {
|
|
6831
|
+
type: "aac-seeking-hints",
|
|
6832
|
+
audioSampleMap: aacState.audioSamples.getSamples(),
|
|
6833
|
+
lastSampleObserved: samplesObserved.getLastSampleObserved()
|
|
6834
|
+
};
|
|
6835
|
+
};
|
|
6836
|
+
var setSeekingHintsForAac = () => {};
|
|
6837
|
+
|
|
6838
|
+
// src/containers/flac/seeking-hints.ts
|
|
6839
|
+
var getSeekingHintsForFlac = ({
|
|
6840
|
+
flacState,
|
|
6841
|
+
samplesObserved
|
|
6842
|
+
}) => {
|
|
6843
|
+
return {
|
|
6844
|
+
type: "flac-seeking-hints",
|
|
6845
|
+
audioSampleMap: flacState.audioSamples.getSamples(),
|
|
6846
|
+
blockingBitStrategy: flacState.getBlockingBitStrategy() ?? null,
|
|
6847
|
+
lastSampleObserved: samplesObserved.getLastSampleObserved()
|
|
6848
|
+
};
|
|
6849
|
+
};
|
|
6850
|
+
var setSeekingHintsForFlac = ({
|
|
6851
|
+
hints,
|
|
6852
|
+
state
|
|
6853
|
+
}) => {
|
|
6854
|
+
if (hints.blockingBitStrategy !== null) {
|
|
6855
|
+
state.flac.setBlockingBitStrategy(hints.blockingBitStrategy);
|
|
6856
|
+
}
|
|
6857
|
+
state.flac.audioSamples.setFromSeekingHints(hints.audioSampleMap);
|
|
6858
|
+
};
|
|
6859
|
+
|
|
6428
6860
|
// src/state/iso-base-media/precomputed-tfra.ts
|
|
6429
6861
|
var precomputedTfraState = () => {
|
|
6430
6862
|
let tfraBoxes = [];
|
|
@@ -6475,6 +6907,72 @@ var getSeekingHintsFromMp4 = ({
|
|
|
6475
6907
|
};
|
|
6476
6908
|
var setSeekingHintsForMp4 = ({}) => {};
|
|
6477
6909
|
|
|
6910
|
+
// src/containers/mp3/seeking-hints.ts
|
|
6911
|
+
var getSeekingHintsForMp3 = ({
|
|
6912
|
+
mp3State,
|
|
6913
|
+
samplesObserved,
|
|
6914
|
+
mediaSectionState: mediaSectionState2,
|
|
6915
|
+
contentLength
|
|
6916
|
+
}) => {
|
|
6917
|
+
return {
|
|
6918
|
+
type: "mp3-seeking-hints",
|
|
6919
|
+
audioSampleMap: mp3State.audioSamples.getSamples(),
|
|
6920
|
+
lastSampleObserved: samplesObserved.getLastSampleObserved(),
|
|
6921
|
+
mp3BitrateInfo: mp3State.getMp3BitrateInfo(),
|
|
6922
|
+
mp3Info: mp3State.getMp3Info(),
|
|
6923
|
+
mediaSection: mediaSectionState2.getMediaSections()[0] ?? null,
|
|
6924
|
+
contentLength
|
|
6925
|
+
};
|
|
6926
|
+
};
|
|
6927
|
+
var setSeekingHintsForMp3 = ({
|
|
6928
|
+
hints,
|
|
6929
|
+
state
|
|
6930
|
+
}) => {
|
|
6931
|
+
state.mp3.audioSamples.setFromSeekingHints(hints.audioSampleMap);
|
|
6932
|
+
};
|
|
6933
|
+
|
|
6934
|
+
// src/containers/riff/has-index.ts
|
|
6935
|
+
var riffHasIndex = (structure) => {
|
|
6936
|
+
return structure.boxes.find((b) => b.type === "list-box" && b.listType === "hdrl")?.children.find((box) => box.type === "avih-box")?.hasIndex ?? false;
|
|
6937
|
+
};
|
|
6938
|
+
|
|
6939
|
+
// src/containers/riff/seeking-hints.ts
|
|
6940
|
+
var getSeekingHintsForRiff = ({
|
|
6941
|
+
structureState,
|
|
6942
|
+
riffState,
|
|
6943
|
+
mediaSectionState: mediaSectionState2
|
|
6944
|
+
}) => {
|
|
6945
|
+
const structure = structureState.getRiffStructure();
|
|
6946
|
+
const strl = getStrlBoxes(structure);
|
|
6947
|
+
let samplesPerSecond = null;
|
|
6948
|
+
for (const s of strl) {
|
|
6949
|
+
const strh = getStrhBox(s.children);
|
|
6950
|
+
if (!strh) {
|
|
6951
|
+
throw new Error("No strh box");
|
|
6952
|
+
}
|
|
6953
|
+
if (strh.strf.type !== "strf-box-video") {
|
|
6954
|
+
continue;
|
|
6955
|
+
}
|
|
6956
|
+
samplesPerSecond = strh.rate / strh.scale;
|
|
6957
|
+
break;
|
|
6958
|
+
}
|
|
6959
|
+
return {
|
|
6960
|
+
type: "riff-seeking-hints",
|
|
6961
|
+
hasIndex: riffHasIndex(structure),
|
|
6962
|
+
idx1Entries: riffState.lazyIdx1.getIfAlreadyLoaded(),
|
|
6963
|
+
samplesPerSecond,
|
|
6964
|
+
moviOffset: mediaSectionState2.getMediaSections()[0]?.start ?? null,
|
|
6965
|
+
observedKeyframes: riffState.sampleCounter.riffKeys.getKeyframes()
|
|
6966
|
+
};
|
|
6967
|
+
};
|
|
6968
|
+
var setSeekingHintsForRiff = ({
|
|
6969
|
+
hints,
|
|
6970
|
+
state
|
|
6971
|
+
}) => {
|
|
6972
|
+
state.riff.lazyIdx1.setFromSeekingHints(hints);
|
|
6973
|
+
state.riff.sampleCounter.riffKeys.setFromSeekingHints(hints.observedKeyframes);
|
|
6974
|
+
};
|
|
6975
|
+
|
|
6478
6976
|
// src/containers/transport-stream/seeking-hints.ts
|
|
6479
6977
|
var getSeekingHintsFromTransportStream = (transportStream, tracksState) => {
|
|
6480
6978
|
const firstVideoTrack = tracksState.getTracks().find((t) => t.type === "video");
|
|
@@ -6548,7 +7046,7 @@ var setSeekingHintsForWebm = ({
|
|
|
6548
7046
|
state
|
|
6549
7047
|
}) => {
|
|
6550
7048
|
state.webm.cues.setFromSeekingHints(hints);
|
|
6551
|
-
state.keyframes.setFromSeekingHints(hints);
|
|
7049
|
+
state.keyframes.setFromSeekingHints(hints.keyframes);
|
|
6552
7050
|
state.webm.setTimeStampMapForSeekingHints(hints.timestampMap);
|
|
6553
7051
|
};
|
|
6554
7052
|
|
|
@@ -6561,7 +7059,13 @@ var getSeekingHints = ({
|
|
|
6561
7059
|
transportStream,
|
|
6562
7060
|
tracksState,
|
|
6563
7061
|
keyframesState,
|
|
6564
|
-
webmState
|
|
7062
|
+
webmState,
|
|
7063
|
+
flacState,
|
|
7064
|
+
samplesObserved,
|
|
7065
|
+
riffState,
|
|
7066
|
+
mp3State,
|
|
7067
|
+
contentLength,
|
|
7068
|
+
aacState
|
|
6565
7069
|
}) => {
|
|
6566
7070
|
const structure = structureState.getStructureOrNull();
|
|
6567
7071
|
if (!structure) {
|
|
@@ -6587,6 +7091,33 @@ var getSeekingHints = ({
|
|
|
6587
7091
|
if (structure.type === "transport-stream") {
|
|
6588
7092
|
return getSeekingHintsFromTransportStream(transportStream, tracksState);
|
|
6589
7093
|
}
|
|
7094
|
+
if (structure.type === "flac") {
|
|
7095
|
+
return getSeekingHintsForFlac({
|
|
7096
|
+
flacState,
|
|
7097
|
+
samplesObserved
|
|
7098
|
+
});
|
|
7099
|
+
}
|
|
7100
|
+
if (structure.type === "riff") {
|
|
7101
|
+
return getSeekingHintsForRiff({
|
|
7102
|
+
structureState,
|
|
7103
|
+
riffState,
|
|
7104
|
+
mediaSectionState: mediaSectionState2
|
|
7105
|
+
});
|
|
7106
|
+
}
|
|
7107
|
+
if (structure.type === "mp3") {
|
|
7108
|
+
return getSeekingHintsForMp3({
|
|
7109
|
+
mp3State,
|
|
7110
|
+
samplesObserved,
|
|
7111
|
+
mediaSectionState: mediaSectionState2,
|
|
7112
|
+
contentLength
|
|
7113
|
+
});
|
|
7114
|
+
}
|
|
7115
|
+
if (structure.type === "aac") {
|
|
7116
|
+
return getSeekingHintsForAac({
|
|
7117
|
+
aacState,
|
|
7118
|
+
samplesObserved
|
|
7119
|
+
});
|
|
7120
|
+
}
|
|
6590
7121
|
throw new Error(`Seeking is not supported for this format: ${structure.type}`);
|
|
6591
7122
|
};
|
|
6592
7123
|
|
|
@@ -6801,7 +7332,13 @@ var turnSeekIntoByte = async ({
|
|
|
6801
7332
|
transportStream,
|
|
6802
7333
|
tracksState,
|
|
6803
7334
|
webmState,
|
|
6804
|
-
keyframes
|
|
7335
|
+
keyframes,
|
|
7336
|
+
flacState,
|
|
7337
|
+
samplesObserved,
|
|
7338
|
+
riffState,
|
|
7339
|
+
mp3State,
|
|
7340
|
+
contentLength,
|
|
7341
|
+
aacState
|
|
6805
7342
|
}) => {
|
|
6806
7343
|
const mediaSections = mediaSectionState2.getMediaSections();
|
|
6807
7344
|
if (mediaSections.length === 0) {
|
|
@@ -6815,6 +7352,8 @@ var turnSeekIntoByte = async ({
|
|
|
6815
7352
|
throw new Error(`Cannot seek to a negative time: ${JSON.stringify(seek2)}`);
|
|
6816
7353
|
}
|
|
6817
7354
|
const seekingHints = getSeekingHints({
|
|
7355
|
+
riffState,
|
|
7356
|
+
samplesObserved,
|
|
6818
7357
|
structureState,
|
|
6819
7358
|
mp4HeaderSegment,
|
|
6820
7359
|
mediaSectionState: mediaSectionState2,
|
|
@@ -6822,7 +7361,11 @@ var turnSeekIntoByte = async ({
|
|
|
6822
7361
|
transportStream,
|
|
6823
7362
|
tracksState,
|
|
6824
7363
|
keyframesState: keyframes,
|
|
6825
|
-
webmState
|
|
7364
|
+
webmState,
|
|
7365
|
+
flacState,
|
|
7366
|
+
mp3State,
|
|
7367
|
+
contentLength,
|
|
7368
|
+
aacState
|
|
6826
7369
|
});
|
|
6827
7370
|
if (!seekingHints) {
|
|
6828
7371
|
Log.trace(logLevel, "No seeking info, cannot seek yet");
|
|
@@ -6840,7 +7383,8 @@ var turnSeekIntoByte = async ({
|
|
|
6840
7383
|
webmState,
|
|
6841
7384
|
mediaSection: mediaSectionState2,
|
|
6842
7385
|
mp4HeaderSegment,
|
|
6843
|
-
structure: structureState
|
|
7386
|
+
structure: structureState,
|
|
7387
|
+
riffState
|
|
6844
7388
|
});
|
|
6845
7389
|
return seekingByte;
|
|
6846
7390
|
}
|
|
@@ -6872,7 +7416,12 @@ var getWorkOnSeekRequestOptions = (state) => {
|
|
|
6872
7416
|
transportStream: state.transportStream,
|
|
6873
7417
|
tracksState: state.callbacks.tracks,
|
|
6874
7418
|
webmState: state.webm,
|
|
6875
|
-
keyframes: state.keyframes
|
|
7419
|
+
keyframes: state.keyframes,
|
|
7420
|
+
flacState: state.flac,
|
|
7421
|
+
samplesObserved: state.samplesObserved,
|
|
7422
|
+
riffState: state.riff,
|
|
7423
|
+
mp3State: state.mp3,
|
|
7424
|
+
aacState: state.aac
|
|
6876
7425
|
};
|
|
6877
7426
|
};
|
|
6878
7427
|
var workOnSeekRequest = async (options) => {
|
|
@@ -6895,7 +7444,12 @@ var workOnSeekRequest = async (options) => {
|
|
|
6895
7444
|
transportStream,
|
|
6896
7445
|
tracksState,
|
|
6897
7446
|
webmState,
|
|
6898
|
-
keyframes
|
|
7447
|
+
keyframes,
|
|
7448
|
+
flacState,
|
|
7449
|
+
samplesObserved,
|
|
7450
|
+
riffState,
|
|
7451
|
+
mp3State,
|
|
7452
|
+
aacState
|
|
6899
7453
|
} = options;
|
|
6900
7454
|
const seek2 = controller._internals.seekSignal.getSeek();
|
|
6901
7455
|
if (!seek2) {
|
|
@@ -6913,7 +7467,13 @@ var workOnSeekRequest = async (options) => {
|
|
|
6913
7467
|
transportStream,
|
|
6914
7468
|
tracksState,
|
|
6915
7469
|
webmState,
|
|
6916
|
-
keyframes
|
|
7470
|
+
keyframes,
|
|
7471
|
+
flacState,
|
|
7472
|
+
samplesObserved,
|
|
7473
|
+
riffState,
|
|
7474
|
+
mp3State,
|
|
7475
|
+
contentLength,
|
|
7476
|
+
aacState
|
|
6917
7477
|
});
|
|
6918
7478
|
Log.trace(logLevel, `Seek action: ${JSON.stringify(resolution)}`);
|
|
6919
7479
|
if (resolution.type === "intermediary-seek") {
|
|
@@ -7007,7 +7567,7 @@ var emitAvailableInfo = async ({
|
|
|
7007
7567
|
}
|
|
7008
7568
|
if (key === "slowDurationInSeconds") {
|
|
7009
7569
|
if (hasInfo.slowDurationInSeconds && !emittedFields.slowDurationInSeconds) {
|
|
7010
|
-
const slowDurationInSeconds = getDuration(state) ?? state.
|
|
7570
|
+
const slowDurationInSeconds = getDuration(state) ?? state.samplesObserved.getSlowDurationInSeconds();
|
|
7011
7571
|
await callbackFunctions.onSlowDurationInSeconds?.(slowDurationInSeconds);
|
|
7012
7572
|
if (fieldsInReturnValue.slowDurationInSeconds) {
|
|
7013
7573
|
returnValue.slowDurationInSeconds = slowDurationInSeconds;
|
|
@@ -7041,7 +7601,7 @@ var emitAvailableInfo = async ({
|
|
|
7041
7601
|
}
|
|
7042
7602
|
if (key === "slowFps") {
|
|
7043
7603
|
if (hasInfo.slowFps && !emittedFields.slowFps) {
|
|
7044
|
-
const slowFps = state.
|
|
7604
|
+
const slowFps = state.samplesObserved.getFps();
|
|
7045
7605
|
await callbackFunctions.onSlowFps?.(slowFps);
|
|
7046
7606
|
if (fieldsInReturnValue.slowFps) {
|
|
7047
7607
|
returnValue.slowFps = slowFps;
|
|
@@ -7221,9 +7781,9 @@ var emitAvailableInfo = async ({
|
|
|
7221
7781
|
}
|
|
7222
7782
|
if (key === "slowNumberOfFrames") {
|
|
7223
7783
|
if (!emittedFields.slowNumberOfFrames && hasInfo.slowNumberOfFrames) {
|
|
7224
|
-
await callbackFunctions.onSlowNumberOfFrames?.(state.
|
|
7784
|
+
await callbackFunctions.onSlowNumberOfFrames?.(state.samplesObserved.getSlowNumberOfFrames());
|
|
7225
7785
|
if (fieldsInReturnValue.slowNumberOfFrames) {
|
|
7226
|
-
returnValue.slowNumberOfFrames = state.
|
|
7786
|
+
returnValue.slowNumberOfFrames = state.samplesObserved.getSlowNumberOfFrames();
|
|
7227
7787
|
}
|
|
7228
7788
|
emittedFields.slowNumberOfFrames = true;
|
|
7229
7789
|
}
|
|
@@ -7231,9 +7791,9 @@ var emitAvailableInfo = async ({
|
|
|
7231
7791
|
}
|
|
7232
7792
|
if (key === "slowAudioBitrate") {
|
|
7233
7793
|
if (!emittedFields.slowAudioBitrate && hasInfo.slowAudioBitrate) {
|
|
7234
|
-
await callbackFunctions.onSlowAudioBitrate?.(state.
|
|
7794
|
+
await callbackFunctions.onSlowAudioBitrate?.(state.samplesObserved.getAudioBitrate());
|
|
7235
7795
|
if (fieldsInReturnValue.slowAudioBitrate) {
|
|
7236
|
-
returnValue.slowAudioBitrate = state.
|
|
7796
|
+
returnValue.slowAudioBitrate = state.samplesObserved.getAudioBitrate();
|
|
7237
7797
|
}
|
|
7238
7798
|
emittedFields.slowAudioBitrate = true;
|
|
7239
7799
|
}
|
|
@@ -7241,9 +7801,9 @@ var emitAvailableInfo = async ({
|
|
|
7241
7801
|
}
|
|
7242
7802
|
if (key === "slowVideoBitrate") {
|
|
7243
7803
|
if (!emittedFields.slowVideoBitrate && hasInfo.slowVideoBitrate) {
|
|
7244
|
-
await callbackFunctions.onSlowVideoBitrate?.(state.
|
|
7804
|
+
await callbackFunctions.onSlowVideoBitrate?.(state.samplesObserved.getVideoBitrate());
|
|
7245
7805
|
if (fieldsInReturnValue.slowVideoBitrate) {
|
|
7246
|
-
returnValue.slowVideoBitrate = state.
|
|
7806
|
+
returnValue.slowVideoBitrate = state.samplesObserved.getVideoBitrate();
|
|
7247
7807
|
}
|
|
7248
7808
|
emittedFields.slowVideoBitrate = true;
|
|
7249
7809
|
}
|
|
@@ -7520,6 +8080,10 @@ var parseAac = async (state) => {
|
|
|
7520
8080
|
iterator.counter.decrement(iterator.counter.getOffset() - startOffset);
|
|
7521
8081
|
const data = iterator.getSlice(frameLength);
|
|
7522
8082
|
if (state.callbacks.tracks.getTracks().length === 0) {
|
|
8083
|
+
state.mediaSection.addMediaSection({
|
|
8084
|
+
start: startOffset,
|
|
8085
|
+
size: state.contentLength - startOffset
|
|
8086
|
+
});
|
|
7523
8087
|
await registerAudioTrack({
|
|
7524
8088
|
container: "aac",
|
|
7525
8089
|
track: {
|
|
@@ -7544,24 +8108,26 @@ var parseAac = async (state) => {
|
|
|
7544
8108
|
const duration2 = 1024 / sampleRate;
|
|
7545
8109
|
const { index } = state.aac.addSample({ offset: startOffset, size: frameLength });
|
|
7546
8110
|
const timestamp = 1024 / sampleRate * index;
|
|
7547
|
-
|
|
7548
|
-
|
|
7549
|
-
|
|
7550
|
-
|
|
7551
|
-
duration: duration2,
|
|
7552
|
-
type: "key",
|
|
7553
|
-
data,
|
|
7554
|
-
offset: startOffset,
|
|
7555
|
-
timescale: 1e6,
|
|
7556
|
-
trackId: 0,
|
|
7557
|
-
cts: timestamp,
|
|
7558
|
-
dts: timestamp,
|
|
7559
|
-
timestamp
|
|
7560
|
-
},
|
|
7561
|
-
timescale: 1
|
|
7562
|
-
}),
|
|
7563
|
-
callbacks: state.callbacks
|
|
8111
|
+
state.aac.audioSamples.addSample({
|
|
8112
|
+
timeInSeconds: timestamp,
|
|
8113
|
+
offset: startOffset,
|
|
8114
|
+
durationInSeconds: duration2
|
|
7564
8115
|
});
|
|
8116
|
+
const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
8117
|
+
sample: {
|
|
8118
|
+
duration: duration2,
|
|
8119
|
+
type: "key",
|
|
8120
|
+
data,
|
|
8121
|
+
offset: startOffset,
|
|
8122
|
+
timescale: 1e6,
|
|
8123
|
+
trackId: 0,
|
|
8124
|
+
cts: timestamp,
|
|
8125
|
+
dts: timestamp,
|
|
8126
|
+
timestamp
|
|
8127
|
+
},
|
|
8128
|
+
timescale: 1
|
|
8129
|
+
});
|
|
8130
|
+
await state.callbacks.onAudioSample(0, audioSample);
|
|
7565
8131
|
return Promise.resolve(null);
|
|
7566
8132
|
};
|
|
7567
8133
|
|
|
@@ -7751,24 +8317,26 @@ var emitSample = async ({
|
|
|
7751
8317
|
throw new Error("Cannot determine timestamp");
|
|
7752
8318
|
}
|
|
7753
8319
|
const timestamp = num * streamInfo.maximumBlockSize / streamInfo.sampleRate;
|
|
7754
|
-
|
|
7755
|
-
|
|
7756
|
-
|
|
7757
|
-
|
|
7758
|
-
data,
|
|
7759
|
-
duration: duration2,
|
|
7760
|
-
cts: timestamp,
|
|
7761
|
-
dts: timestamp,
|
|
7762
|
-
timestamp,
|
|
7763
|
-
type: "key",
|
|
7764
|
-
offset,
|
|
7765
|
-
timescale: 1e6,
|
|
7766
|
-
trackId: 0
|
|
7767
|
-
},
|
|
7768
|
-
timescale: 1
|
|
7769
|
-
}),
|
|
7770
|
-
callbacks: state.callbacks
|
|
8320
|
+
state.flac.audioSamples.addSample({
|
|
8321
|
+
timeInSeconds: timestamp,
|
|
8322
|
+
offset,
|
|
8323
|
+
durationInSeconds: duration2
|
|
7771
8324
|
});
|
|
8325
|
+
const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
8326
|
+
sample: {
|
|
8327
|
+
data,
|
|
8328
|
+
duration: duration2,
|
|
8329
|
+
cts: timestamp,
|
|
8330
|
+
dts: timestamp,
|
|
8331
|
+
timestamp,
|
|
8332
|
+
type: "key",
|
|
8333
|
+
offset,
|
|
8334
|
+
timescale: 1,
|
|
8335
|
+
trackId: 0
|
|
8336
|
+
},
|
|
8337
|
+
timescale: 1
|
|
8338
|
+
});
|
|
8339
|
+
await state.callbacks.onAudioSample(0, audioSample);
|
|
7772
8340
|
iterator.destroy();
|
|
7773
8341
|
};
|
|
7774
8342
|
var parseFlacFrame = async ({
|
|
@@ -7950,27 +8518,15 @@ var parseFlacUnkownBlock = ({
|
|
|
7950
8518
|
return Promise.resolve(null);
|
|
7951
8519
|
};
|
|
7952
8520
|
|
|
7953
|
-
// src/containers/flac/parse-
|
|
8521
|
+
// src/containers/flac/parse-meta.ts
|
|
7954
8522
|
var flacTypes = {
|
|
7955
8523
|
streaminfo: 0,
|
|
7956
8524
|
vorbisComment: 4
|
|
7957
8525
|
};
|
|
7958
|
-
var
|
|
8526
|
+
var parseMetaBlock = ({
|
|
7959
8527
|
iterator,
|
|
7960
8528
|
state
|
|
7961
8529
|
}) => {
|
|
7962
|
-
const mediaSectionState2 = state.mediaSection.isCurrentByteInMediaSection(iterator);
|
|
7963
|
-
if (mediaSectionState2 === "in-section") {
|
|
7964
|
-
if (maySkipVideoData({ state })) {
|
|
7965
|
-
return Promise.resolve(makeSkip(state.contentLength));
|
|
7966
|
-
}
|
|
7967
|
-
return parseFlacFrame({ state, iterator });
|
|
7968
|
-
}
|
|
7969
|
-
const bytes = iterator.getByteString(4, true);
|
|
7970
|
-
if (bytes === "fLaC") {
|
|
7971
|
-
return parseFlacHeader({ state, iterator });
|
|
7972
|
-
}
|
|
7973
|
-
iterator.counter.decrement(4);
|
|
7974
8530
|
iterator.startReadingBits();
|
|
7975
8531
|
const isLastMetadata = iterator.getBits(1);
|
|
7976
8532
|
const metaBlockType = iterator.getBits(7);
|
|
@@ -7991,6 +8547,29 @@ var parseFlac = ({
|
|
|
7991
8547
|
return parseFlacUnkownBlock({ iterator, state, size });
|
|
7992
8548
|
};
|
|
7993
8549
|
|
|
8550
|
+
// src/containers/flac/parse-flac.ts
|
|
8551
|
+
var parseFlac = ({
|
|
8552
|
+
iterator,
|
|
8553
|
+
state
|
|
8554
|
+
}) => {
|
|
8555
|
+
const mediaSectionState2 = state.mediaSection.isCurrentByteInMediaSection(iterator);
|
|
8556
|
+
if (mediaSectionState2 === "in-section") {
|
|
8557
|
+
if (maySkipVideoData({ state })) {
|
|
8558
|
+
return Promise.resolve(makeSkip(state.contentLength));
|
|
8559
|
+
}
|
|
8560
|
+
return parseFlacFrame({ state, iterator });
|
|
8561
|
+
}
|
|
8562
|
+
const bytes = iterator.getByteString(4, true);
|
|
8563
|
+
if (bytes === "fLaC") {
|
|
8564
|
+
return parseFlacHeader({ state, iterator });
|
|
8565
|
+
}
|
|
8566
|
+
iterator.counter.decrement(4);
|
|
8567
|
+
return parseMetaBlock({
|
|
8568
|
+
iterator,
|
|
8569
|
+
state
|
|
8570
|
+
});
|
|
8571
|
+
};
|
|
8572
|
+
|
|
7994
8573
|
// src/state/iso-base-media/cached-sample-positions.ts
|
|
7995
8574
|
var calculateFlatSamples = (state) => {
|
|
7996
8575
|
const tracks2 = getTracks(state, true);
|
|
@@ -8342,7 +8921,8 @@ var getIsoBaseMediaChildren = async ({
|
|
|
8342
8921
|
size,
|
|
8343
8922
|
iterator,
|
|
8344
8923
|
logLevel,
|
|
8345
|
-
onlyIfMoovAtomExpected
|
|
8924
|
+
onlyIfMoovAtomExpected,
|
|
8925
|
+
contentLength
|
|
8346
8926
|
}) => {
|
|
8347
8927
|
const boxes = [];
|
|
8348
8928
|
const initial = iterator.counter.getOffset();
|
|
@@ -8351,7 +8931,8 @@ var getIsoBaseMediaChildren = async ({
|
|
|
8351
8931
|
iterator,
|
|
8352
8932
|
logLevel,
|
|
8353
8933
|
onlyIfMoovAtomExpected,
|
|
8354
|
-
onlyIfMdatAtomExpected: null
|
|
8934
|
+
onlyIfMdatAtomExpected: null,
|
|
8935
|
+
contentLength
|
|
8355
8936
|
});
|
|
8356
8937
|
if (!parsed) {
|
|
8357
8938
|
throw new Error("Expected box");
|
|
@@ -8614,13 +9195,15 @@ var parseMoov = async ({
|
|
|
8614
9195
|
size,
|
|
8615
9196
|
onlyIfMoovAtomExpected,
|
|
8616
9197
|
iterator,
|
|
8617
|
-
logLevel
|
|
9198
|
+
logLevel,
|
|
9199
|
+
contentLength
|
|
8618
9200
|
}) => {
|
|
8619
9201
|
const children = await getIsoBaseMediaChildren({
|
|
8620
9202
|
onlyIfMoovAtomExpected,
|
|
8621
9203
|
size: size - 8,
|
|
8622
9204
|
iterator,
|
|
8623
|
-
logLevel
|
|
9205
|
+
logLevel,
|
|
9206
|
+
contentLength
|
|
8624
9207
|
});
|
|
8625
9208
|
return {
|
|
8626
9209
|
offset,
|
|
@@ -8968,7 +9551,8 @@ var parseMebx = async ({
|
|
|
8968
9551
|
offset,
|
|
8969
9552
|
size,
|
|
8970
9553
|
iterator,
|
|
8971
|
-
logLevel
|
|
9554
|
+
logLevel,
|
|
9555
|
+
contentLength
|
|
8972
9556
|
}) => {
|
|
8973
9557
|
iterator.discard(6);
|
|
8974
9558
|
const dataReferenceIndex = iterator.getUint16();
|
|
@@ -8976,7 +9560,8 @@ var parseMebx = async ({
|
|
|
8976
9560
|
iterator,
|
|
8977
9561
|
size: size - 8,
|
|
8978
9562
|
logLevel,
|
|
8979
|
-
onlyIfMoovAtomExpected: null
|
|
9563
|
+
onlyIfMoovAtomExpected: null,
|
|
9564
|
+
contentLength
|
|
8980
9565
|
});
|
|
8981
9566
|
return {
|
|
8982
9567
|
type: "mebx-box",
|
|
@@ -9138,7 +9723,8 @@ var audioTags = [
|
|
|
9138
9723
|
];
|
|
9139
9724
|
var processIsoFormatBox = async ({
|
|
9140
9725
|
iterator,
|
|
9141
|
-
logLevel
|
|
9726
|
+
logLevel,
|
|
9727
|
+
contentLength
|
|
9142
9728
|
}) => {
|
|
9143
9729
|
const fileOffset = iterator.counter.getOffset();
|
|
9144
9730
|
const bytesRemaining = iterator.bytesRemaining();
|
|
@@ -9178,7 +9764,8 @@ var processIsoFormatBox = async ({
|
|
|
9178
9764
|
iterator,
|
|
9179
9765
|
logLevel,
|
|
9180
9766
|
size: boxSize - (iterator.counter.getOffset() - fileOffset),
|
|
9181
|
-
onlyIfMoovAtomExpected: null
|
|
9767
|
+
onlyIfMoovAtomExpected: null,
|
|
9768
|
+
contentLength
|
|
9182
9769
|
});
|
|
9183
9770
|
return {
|
|
9184
9771
|
sample: {
|
|
@@ -9217,7 +9804,8 @@ var processIsoFormatBox = async ({
|
|
|
9217
9804
|
iterator,
|
|
9218
9805
|
logLevel,
|
|
9219
9806
|
size: boxSize - (iterator.counter.getOffset() - fileOffset),
|
|
9220
|
-
onlyIfMoovAtomExpected: null
|
|
9807
|
+
onlyIfMoovAtomExpected: null,
|
|
9808
|
+
contentLength
|
|
9221
9809
|
});
|
|
9222
9810
|
return {
|
|
9223
9811
|
sample: {
|
|
@@ -9260,7 +9848,8 @@ var processIsoFormatBox = async ({
|
|
|
9260
9848
|
iterator,
|
|
9261
9849
|
logLevel,
|
|
9262
9850
|
size: boxSize - (iterator.counter.getOffset() - fileOffset),
|
|
9263
|
-
onlyIfMoovAtomExpected: null
|
|
9851
|
+
onlyIfMoovAtomExpected: null,
|
|
9852
|
+
contentLength
|
|
9264
9853
|
});
|
|
9265
9854
|
return {
|
|
9266
9855
|
sample: {
|
|
@@ -9307,7 +9896,8 @@ var processIsoFormatBox = async ({
|
|
|
9307
9896
|
onlyIfMoovAtomExpected: null,
|
|
9308
9897
|
iterator,
|
|
9309
9898
|
logLevel,
|
|
9310
|
-
size: bytesRemainingInBox
|
|
9899
|
+
size: bytesRemainingInBox,
|
|
9900
|
+
contentLength
|
|
9311
9901
|
}) : (iterator.discard(bytesRemainingInBox), []);
|
|
9312
9902
|
return {
|
|
9313
9903
|
sample: {
|
|
@@ -9339,14 +9929,16 @@ var processIsoFormatBox = async ({
|
|
|
9339
9929
|
var parseIsoFormatBoxes = async ({
|
|
9340
9930
|
maxBytes,
|
|
9341
9931
|
logLevel,
|
|
9342
|
-
iterator
|
|
9932
|
+
iterator,
|
|
9933
|
+
contentLength
|
|
9343
9934
|
}) => {
|
|
9344
9935
|
const samples = [];
|
|
9345
9936
|
const initialOffset = iterator.counter.getOffset();
|
|
9346
9937
|
while (iterator.bytesRemaining() > 0 && iterator.counter.getOffset() - initialOffset < maxBytes) {
|
|
9347
9938
|
const { sample } = await processIsoFormatBox({
|
|
9348
9939
|
iterator,
|
|
9349
|
-
logLevel
|
|
9940
|
+
logLevel,
|
|
9941
|
+
contentLength
|
|
9350
9942
|
});
|
|
9351
9943
|
if (sample) {
|
|
9352
9944
|
samples.push(sample);
|
|
@@ -9360,7 +9952,8 @@ var parseStsd = async ({
|
|
|
9360
9952
|
offset,
|
|
9361
9953
|
size,
|
|
9362
9954
|
iterator,
|
|
9363
|
-
logLevel
|
|
9955
|
+
logLevel,
|
|
9956
|
+
contentLength
|
|
9364
9957
|
}) => {
|
|
9365
9958
|
const version = iterator.getUint8();
|
|
9366
9959
|
if (version !== 0) {
|
|
@@ -9372,7 +9965,8 @@ var parseStsd = async ({
|
|
|
9372
9965
|
const boxes = await parseIsoFormatBoxes({
|
|
9373
9966
|
maxBytes: bytesRemainingInBox,
|
|
9374
9967
|
logLevel,
|
|
9375
|
-
iterator
|
|
9968
|
+
iterator,
|
|
9969
|
+
contentLength
|
|
9376
9970
|
});
|
|
9377
9971
|
if (boxes.length !== numberOfEntries) {
|
|
9378
9972
|
throw new Error(`Expected ${numberOfEntries} sample descriptions, got ${boxes.length}`);
|
|
@@ -9641,13 +10235,15 @@ var parseTrak = async ({
|
|
|
9641
10235
|
size,
|
|
9642
10236
|
offsetAtStart,
|
|
9643
10237
|
iterator,
|
|
9644
|
-
logLevel
|
|
10238
|
+
logLevel,
|
|
10239
|
+
contentLength
|
|
9645
10240
|
}) => {
|
|
9646
10241
|
const children = await getIsoBaseMediaChildren({
|
|
9647
10242
|
onlyIfMoovAtomExpected: null,
|
|
9648
10243
|
size: size - 8,
|
|
9649
10244
|
iterator,
|
|
9650
|
-
logLevel
|
|
10245
|
+
logLevel,
|
|
10246
|
+
contentLength
|
|
9651
10247
|
});
|
|
9652
10248
|
return {
|
|
9653
10249
|
offset: offsetAtStart,
|
|
@@ -9704,7 +10300,8 @@ var processBox = async ({
|
|
|
9704
10300
|
iterator,
|
|
9705
10301
|
logLevel,
|
|
9706
10302
|
onlyIfMoovAtomExpected,
|
|
9707
|
-
onlyIfMdatAtomExpected
|
|
10303
|
+
onlyIfMdatAtomExpected,
|
|
10304
|
+
contentLength
|
|
9708
10305
|
}) => {
|
|
9709
10306
|
const fileOffset = iterator.counter.getOffset();
|
|
9710
10307
|
const { returnToCheckpoint } = iterator.startCheckpoint();
|
|
@@ -9721,13 +10318,14 @@ var processBox = async ({
|
|
|
9721
10318
|
iterator.counter.decrement(iterator.counter.getOffset() - fileOffset);
|
|
9722
10319
|
throw new Error(`Expected box size of ${bytesRemaining}, got ${boxSizeRaw}. Incomplete boxes are not allowed.`);
|
|
9723
10320
|
}
|
|
10321
|
+
const maxSize = contentLength - startOff;
|
|
9724
10322
|
const boxType = iterator.getByteString(4, false);
|
|
9725
|
-
const
|
|
9726
|
-
|
|
10323
|
+
const boxSizeUnlimited = boxSizeRaw === 1 ? iterator.getEightByteNumber() : boxSizeRaw;
|
|
10324
|
+
const boxSize = Math.min(boxSizeUnlimited, maxSize);
|
|
9727
10325
|
const headerLength = iterator.counter.getOffset() - startOff;
|
|
9728
10326
|
if (boxType === "mdat") {
|
|
9729
10327
|
if (!onlyIfMdatAtomExpected) {
|
|
9730
|
-
|
|
10328
|
+
return null;
|
|
9731
10329
|
}
|
|
9732
10330
|
const { mediaSectionState: mediaSectionState2 } = onlyIfMdatAtomExpected;
|
|
9733
10331
|
mediaSectionState2.addMediaSection({
|
|
@@ -9766,7 +10364,8 @@ var processBox = async ({
|
|
|
9766
10364
|
offset: fileOffset,
|
|
9767
10365
|
size: boxSize,
|
|
9768
10366
|
iterator,
|
|
9769
|
-
logLevel
|
|
10367
|
+
logLevel,
|
|
10368
|
+
contentLength
|
|
9770
10369
|
});
|
|
9771
10370
|
}
|
|
9772
10371
|
if (boxType === "stsz") {
|
|
@@ -9817,7 +10416,8 @@ var processBox = async ({
|
|
|
9817
10416
|
offset: fileOffset,
|
|
9818
10417
|
size: boxSize,
|
|
9819
10418
|
iterator,
|
|
9820
|
-
logLevel
|
|
10419
|
+
logLevel,
|
|
10420
|
+
contentLength
|
|
9821
10421
|
});
|
|
9822
10422
|
}
|
|
9823
10423
|
if (boxType === "hdlr") {
|
|
@@ -9859,7 +10459,8 @@ var processBox = async ({
|
|
|
9859
10459
|
size: boxSize,
|
|
9860
10460
|
onlyIfMoovAtomExpected,
|
|
9861
10461
|
iterator,
|
|
9862
|
-
logLevel
|
|
10462
|
+
logLevel,
|
|
10463
|
+
contentLength
|
|
9863
10464
|
});
|
|
9864
10465
|
tracks2.setIsDone(logLevel);
|
|
9865
10466
|
return box;
|
|
@@ -9873,7 +10474,8 @@ var processBox = async ({
|
|
|
9873
10474
|
size: boxSize,
|
|
9874
10475
|
offsetAtStart: fileOffset,
|
|
9875
10476
|
iterator,
|
|
9876
|
-
logLevel
|
|
10477
|
+
logLevel,
|
|
10478
|
+
contentLength
|
|
9877
10479
|
});
|
|
9878
10480
|
const transformedTrack = makeBaseMediaTrack(box);
|
|
9879
10481
|
if (transformedTrack && transformedTrack.type === "video") {
|
|
@@ -9953,7 +10555,8 @@ var processBox = async ({
|
|
|
9953
10555
|
iterator,
|
|
9954
10556
|
size: boxSize - 8,
|
|
9955
10557
|
logLevel,
|
|
9956
|
-
onlyIfMoovAtomExpected
|
|
10558
|
+
onlyIfMoovAtomExpected,
|
|
10559
|
+
contentLength
|
|
9957
10560
|
});
|
|
9958
10561
|
return {
|
|
9959
10562
|
type: "regular-box",
|
|
@@ -10015,7 +10618,7 @@ var getMoovAtom = async ({
|
|
|
10015
10618
|
});
|
|
10016
10619
|
return null;
|
|
10017
10620
|
} : null;
|
|
10018
|
-
const iterator = getArrayBufferIterator(new Uint8Array([]), state.contentLength);
|
|
10621
|
+
const iterator = getArrayBufferIterator(new Uint8Array([]), state.contentLength - endOfMdat);
|
|
10019
10622
|
while (true) {
|
|
10020
10623
|
const result = await reader.reader.read();
|
|
10021
10624
|
if (result.value) {
|
|
@@ -10045,7 +10648,8 @@ var getMoovAtom = async ({
|
|
|
10045
10648
|
registerVideoSampleCallback: () => Promise.resolve(),
|
|
10046
10649
|
registerAudioSampleCallback: () => Promise.resolve()
|
|
10047
10650
|
},
|
|
10048
|
-
onlyIfMdatAtomExpected: null
|
|
10651
|
+
onlyIfMdatAtomExpected: null,
|
|
10652
|
+
contentLength: state.contentLength - endOfMdat
|
|
10049
10653
|
});
|
|
10050
10654
|
if (box) {
|
|
10051
10655
|
boxes.push(box);
|
|
@@ -10139,24 +10743,21 @@ var parseMdatSection = async (state) => {
|
|
|
10139
10743
|
chunkSize
|
|
10140
10744
|
});
|
|
10141
10745
|
if (samplesWithIndex.track.type === "audio") {
|
|
10142
|
-
|
|
10143
|
-
|
|
10144
|
-
|
|
10145
|
-
|
|
10146
|
-
|
|
10147
|
-
|
|
10148
|
-
|
|
10149
|
-
|
|
10150
|
-
|
|
10151
|
-
|
|
10152
|
-
type: isKeyframe ? "key" : "delta",
|
|
10153
|
-
offset,
|
|
10154
|
-
timescale: samplesWithIndex.track.timescale
|
|
10155
|
-
},
|
|
10746
|
+
const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
10747
|
+
sample: {
|
|
10748
|
+
data: bytes,
|
|
10749
|
+
timestamp: cts,
|
|
10750
|
+
duration: duration2,
|
|
10751
|
+
cts,
|
|
10752
|
+
dts,
|
|
10753
|
+
trackId: samplesWithIndex.track.trackId,
|
|
10754
|
+
type: isKeyframe ? "key" : "delta",
|
|
10755
|
+
offset,
|
|
10156
10756
|
timescale: samplesWithIndex.track.timescale
|
|
10157
|
-
}
|
|
10158
|
-
|
|
10757
|
+
},
|
|
10758
|
+
timescale: samplesWithIndex.track.timescale
|
|
10159
10759
|
});
|
|
10760
|
+
await state.callbacks.onAudioSample(samplesWithIndex.track.trackId, audioSample);
|
|
10160
10761
|
}
|
|
10161
10762
|
if (samplesWithIndex.track.type === "video") {
|
|
10162
10763
|
const nalUnitType = bytes[4] & 31;
|
|
@@ -10165,24 +10766,21 @@ var parseMdatSection = async (state) => {
|
|
|
10165
10766
|
const seiType = bytes[5];
|
|
10166
10767
|
isRecoveryPoint = seiType === 6;
|
|
10167
10768
|
}
|
|
10168
|
-
|
|
10169
|
-
|
|
10170
|
-
|
|
10171
|
-
|
|
10172
|
-
|
|
10173
|
-
|
|
10174
|
-
|
|
10175
|
-
|
|
10176
|
-
|
|
10177
|
-
|
|
10178
|
-
type: isKeyframe && !isRecoveryPoint ? "key" : "delta",
|
|
10179
|
-
offset,
|
|
10180
|
-
timescale: samplesWithIndex.track.timescale
|
|
10181
|
-
},
|
|
10769
|
+
const videoSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
10770
|
+
sample: {
|
|
10771
|
+
data: bytes,
|
|
10772
|
+
timestamp: cts,
|
|
10773
|
+
duration: duration2,
|
|
10774
|
+
cts,
|
|
10775
|
+
dts,
|
|
10776
|
+
trackId: samplesWithIndex.track.trackId,
|
|
10777
|
+
type: isKeyframe && !isRecoveryPoint ? "key" : "delta",
|
|
10778
|
+
offset,
|
|
10182
10779
|
timescale: samplesWithIndex.track.timescale
|
|
10183
|
-
}
|
|
10184
|
-
|
|
10780
|
+
},
|
|
10781
|
+
timescale: samplesWithIndex.track.timescale
|
|
10185
10782
|
});
|
|
10783
|
+
await state.callbacks.onVideoSample(samplesWithIndex.track.trackId, videoSample);
|
|
10186
10784
|
}
|
|
10187
10785
|
return null;
|
|
10188
10786
|
};
|
|
@@ -10207,7 +10805,8 @@ var parseIsoBaseMedia = async (state) => {
|
|
|
10207
10805
|
},
|
|
10208
10806
|
onlyIfMdatAtomExpected: {
|
|
10209
10807
|
mediaSectionState: state.mediaSection
|
|
10210
|
-
}
|
|
10808
|
+
},
|
|
10809
|
+
contentLength: state.contentLength
|
|
10211
10810
|
});
|
|
10212
10811
|
if (result) {
|
|
10213
10812
|
state.structure.getIsoStructure().boxes.push(result);
|
|
@@ -10981,7 +11580,7 @@ var parseID3V1 = (iterator) => {
|
|
|
10981
11580
|
iterator.discard(128);
|
|
10982
11581
|
};
|
|
10983
11582
|
|
|
10984
|
-
// src/containers/mp3/parse-
|
|
11583
|
+
// src/containers/mp3/parse-packet-header.ts
|
|
10985
11584
|
function getSamplingFrequency({
|
|
10986
11585
|
bits,
|
|
10987
11586
|
mpegVersion
|
|
@@ -11123,15 +11722,7 @@ function getBitrateKB({
|
|
|
11123
11722
|
}
|
|
11124
11723
|
return bitrateTable[bits][key];
|
|
11125
11724
|
}
|
|
11126
|
-
var
|
|
11127
|
-
state
|
|
11128
|
-
}) => {
|
|
11129
|
-
const { iterator } = state;
|
|
11130
|
-
const initialOffset = iterator.counter.getOffset();
|
|
11131
|
-
if (iterator.bytesRemaining() < 32) {
|
|
11132
|
-
return;
|
|
11133
|
-
}
|
|
11134
|
-
iterator.startReadingBits();
|
|
11725
|
+
var innerParseMp3PacketHeader = (iterator) => {
|
|
11135
11726
|
for (let i = 0;i < 11; i++) {
|
|
11136
11727
|
const expectToBe1 = iterator.getBits(1);
|
|
11137
11728
|
if (expectToBe1 !== 1) {
|
|
@@ -11153,15 +11744,15 @@ var parseMpegHeader = async ({
|
|
|
11153
11744
|
throw new Error("Does not support CRC yet");
|
|
11154
11745
|
}
|
|
11155
11746
|
const bitrateIndex = iterator.getBits(4);
|
|
11156
|
-
const
|
|
11747
|
+
const bitrateInKbit = getBitrateKB({
|
|
11157
11748
|
bits: bitrateIndex,
|
|
11158
11749
|
mpegVersion,
|
|
11159
11750
|
level: audioVersionId
|
|
11160
11751
|
});
|
|
11161
|
-
if (
|
|
11752
|
+
if (bitrateInKbit === "bad") {
|
|
11162
11753
|
throw new Error("Invalid bitrate");
|
|
11163
11754
|
}
|
|
11164
|
-
if (
|
|
11755
|
+
if (bitrateInKbit === "free") {
|
|
11165
11756
|
throw new Error("Free bitrate not supported");
|
|
11166
11757
|
}
|
|
11167
11758
|
const samplingFrequencyIndex = iterator.getBits(2);
|
|
@@ -11179,89 +11770,272 @@ var parseMpegHeader = async ({
|
|
|
11179
11770
|
const numberOfChannels = channelMode === 3 ? 1 : 2;
|
|
11180
11771
|
const samplesPerFrame = getSamplesPerMpegFrame({ mpegVersion, layer });
|
|
11181
11772
|
const frameLength = getMpegFrameLength({
|
|
11182
|
-
bitrateKbit,
|
|
11773
|
+
bitrateKbit: bitrateInKbit,
|
|
11183
11774
|
padding,
|
|
11184
11775
|
samplesPerFrame,
|
|
11185
11776
|
samplingFrequency: sampleRate,
|
|
11186
11777
|
layer
|
|
11187
11778
|
});
|
|
11779
|
+
return {
|
|
11780
|
+
frameLength,
|
|
11781
|
+
bitrateInKbit,
|
|
11782
|
+
layer,
|
|
11783
|
+
mpegVersion,
|
|
11784
|
+
numberOfChannels,
|
|
11785
|
+
sampleRate,
|
|
11786
|
+
samplesPerFrame
|
|
11787
|
+
};
|
|
11788
|
+
};
|
|
11789
|
+
var parseMp3PacketHeader = (iterator) => {
|
|
11790
|
+
iterator.startReadingBits();
|
|
11791
|
+
const d = innerParseMp3PacketHeader(iterator);
|
|
11188
11792
|
iterator.stopReadingBits();
|
|
11793
|
+
return d;
|
|
11794
|
+
};
|
|
11795
|
+
var isMp3PacketHeaderHere = (iterator) => {
|
|
11796
|
+
const offset = iterator.counter.getOffset();
|
|
11797
|
+
iterator.startReadingBits();
|
|
11798
|
+
try {
|
|
11799
|
+
const res = innerParseMp3PacketHeader(iterator);
|
|
11800
|
+
iterator.stopReadingBits();
|
|
11801
|
+
iterator.counter.decrement(iterator.counter.getOffset() - offset);
|
|
11802
|
+
return res;
|
|
11803
|
+
} catch {
|
|
11804
|
+
iterator.stopReadingBits();
|
|
11805
|
+
iterator.counter.decrement(iterator.counter.getOffset() - offset);
|
|
11806
|
+
return false;
|
|
11807
|
+
}
|
|
11808
|
+
};
|
|
11809
|
+
var isMp3PacketHeaderHereAndInNext = (iterator) => {
|
|
11810
|
+
const offset = iterator.counter.getOffset();
|
|
11811
|
+
const res = isMp3PacketHeaderHere(iterator);
|
|
11812
|
+
if (!res) {
|
|
11813
|
+
return false;
|
|
11814
|
+
}
|
|
11815
|
+
if (iterator.bytesRemaining() <= res.frameLength) {
|
|
11816
|
+
return true;
|
|
11817
|
+
}
|
|
11818
|
+
iterator.counter.increment(res.frameLength);
|
|
11819
|
+
const isHere = isMp3PacketHeaderHere(iterator);
|
|
11820
|
+
iterator.counter.decrement(iterator.counter.getOffset() - offset);
|
|
11821
|
+
return isHere;
|
|
11822
|
+
};
|
|
11823
|
+
|
|
11824
|
+
// src/containers/mp3/seek/audio-sample-from-cbr.ts
|
|
11825
|
+
var getAudioSampleFromCbr = ({
|
|
11826
|
+
bitrateInKbit,
|
|
11827
|
+
initialOffset,
|
|
11828
|
+
layer,
|
|
11829
|
+
sampleRate,
|
|
11830
|
+
samplesPerFrame,
|
|
11831
|
+
data,
|
|
11832
|
+
state
|
|
11833
|
+
}) => {
|
|
11834
|
+
const avgLength = getAverageMpegFrameLength({
|
|
11835
|
+
bitrateKbit: bitrateInKbit,
|
|
11836
|
+
layer,
|
|
11837
|
+
samplesPerFrame,
|
|
11838
|
+
samplingFrequency: sampleRate
|
|
11839
|
+
});
|
|
11840
|
+
const mp3Info = state.mp3.getMp3Info();
|
|
11841
|
+
if (!mp3Info) {
|
|
11842
|
+
throw new Error("No MP3 info");
|
|
11843
|
+
}
|
|
11844
|
+
const nthFrame = Math.round((initialOffset - state.mediaSection.getMediaSectionAssertOnlyOne().start) / avgLength);
|
|
11845
|
+
const durationInSeconds = samplesPerFrame / sampleRate;
|
|
11846
|
+
const timeInSeconds = nthFrame * samplesPerFrame / sampleRate;
|
|
11847
|
+
const timestamp = Math.round(timeInSeconds * 1e6);
|
|
11848
|
+
const duration2 = Math.round(durationInSeconds * 1e6);
|
|
11849
|
+
const audioSample = {
|
|
11850
|
+
data,
|
|
11851
|
+
cts: timestamp,
|
|
11852
|
+
dts: timestamp,
|
|
11853
|
+
duration: duration2,
|
|
11854
|
+
offset: initialOffset,
|
|
11855
|
+
timescale: 1e6,
|
|
11856
|
+
timestamp,
|
|
11857
|
+
trackId: 0,
|
|
11858
|
+
type: "key"
|
|
11859
|
+
};
|
|
11860
|
+
return { audioSample, timeInSeconds, durationInSeconds };
|
|
11861
|
+
};
|
|
11862
|
+
|
|
11863
|
+
// src/containers/mp3/seek/audio-sample-from-vbr.ts
// Builds an audio sample for a frame at `position` in a variable-bitrate MP3
// by mapping the byte position to a time via the Xing header's table of
// contents. Throws when the Xing header lacks the needed fields.
var getAudioSampleFromVbr = ({
  info,
  position,
  mp3Info,
  data
}) => {
  if (!mp3Info) {
    throw new Error("No MP3 info");
  }
  const samplesPerFrame = getSamplesPerMpegFrame({
    layer: mp3Info.layer,
    mpegVersion: mp3Info.mpegVersion
  });
  // Total stream duration according to the Xing header.
  const wholeFileDuration = getDurationFromMp3Xing({
    samplesPerFrame,
    xingData: info.xingData
  });
  if (!info.xingData.fileSize) {
    throw new Error("file size");
  }
  if (!info.xingData.tableOfContents) {
    throw new Error("table of contents");
  }
  const timeInSeconds = getTimeFromPosition({
    durationInSeconds: wholeFileDuration,
    fileSize: info.xingData.fileSize,
    position,
    tableOfContents: info.xingData.tableOfContents
  });
  const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
  // Express time values in microseconds (timescale 1e6).
  const timestamp = Math.round(timeInSeconds * 1e6);
  const durationInTimescale = Math.round(durationInSeconds * 1e6);
  const audioSample = {
    data,
    cts: timestamp,
    dts: timestamp,
    duration: durationInTimescale,
    offset: position,
    timescale: 1e6,
    timestamp,
    trackId: 0,
    type: "key"
  };
  return { timeInSeconds, audioSample, durationInSeconds };
};
|
|
11909
|
+
|
|
11910
|
+
// src/containers/mp3/parse-mpeg-header.ts
// Parses one MPEG audio frame at the current iterator position.
// On the first frame it also detects CBR vs. VBR (Xing), registers the audio
// track and declares the rest of the file a media section; afterwards it
// emits one audio sample per call.
var parseMpegHeader = async ({
  state
}) => {
  const { iterator } = state;
  const initialOffset = iterator.counter.getOffset();
  if (iterator.bytesRemaining() < 32) {
    return;
  }
  const {
    frameLength,
    bitrateInKbit,
    layer,
    mpegVersion,
    numberOfChannels,
    sampleRate,
    samplesPerFrame
  } = parseMp3PacketHeader(iterator);
  // In a CBR file every frame must advertise the same bitrate.
  const cbrMp3Info = state.mp3.getMp3BitrateInfo();
  if (cbrMp3Info && cbrMp3Info.type === "constant" && bitrateInKbit !== cbrMp3Info.bitrateInKbit) {
    throw new Error(`Bitrate mismatch at offset ${initialOffset}: ${bitrateInKbit} !== ${cbrMp3Info.bitrateInKbit}`);
  }
  // Rewind to the start of the header so the slice covers the whole frame.
  const offsetNow = iterator.counter.getOffset();
  iterator.counter.decrement(offsetNow - initialOffset);
  const data = iterator.getSlice(frameLength);
  if (state.callbacks.tracks.getTracks().length === 0) {
    const info = {
      layer,
      mpegVersion,
      sampleRate
    };
    const asText = new TextDecoder().decode(data);
    if (asText.includes("VBRI")) {
      throw new Error("MP3 files with VBRI are currently unsupported because we have no sample file. Submit this file at remotion.dev/report if you would like us to support this file.");
    }
    if (asText.includes("Info")) {
      // "Info" frames are CBR metadata frames, not audio - skip them.
      return;
    }
    if (asText.includes("Xing")) {
      // A Xing header marks a VBR file; this frame carries no audio.
      const xingData = parseXing(data);
      Log.verbose(state.logLevel, "MP3 has variable bit rate. Requiring whole file to be read");
      state.mp3.setMp3BitrateInfo({
        type: "variable",
        xingData
      });
      return;
    }
    if (!state.mp3.getMp3BitrateInfo()) {
      state.mp3.setMp3BitrateInfo({
        bitrateInKbit,
        type: "constant"
      });
    }
    state.mp3.setMp3Info(info);
    await registerAudioTrack({
      container: "mp3",
      track: {
        type: "audio",
        codec: "mp3",
        codecPrivate: null,
        codecWithoutConfig: "mp3",
        description: undefined,
        numberOfChannels,
        sampleRate,
        timescale: 1e6,
        trackId: 0,
        trakBox: null
      },
      registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
      tracks: state.callbacks.tracks,
      logLevel: state.logLevel,
      onAudioTrack: state.onAudioTrack
    });
    state.callbacks.tracks.setIsDone(state.logLevel);
    // Everything from the first audio frame to EOF is media data.
    state.mediaSection.addMediaSection({
      start: initialOffset,
      size: state.contentLength - initialOffset
    });
  }
  const bitrateInfo = state.mp3.getMp3BitrateInfo();
  if (!bitrateInfo) {
    throw new Error("No bitrate info");
  }
  const sample = bitrateInfo.type === "constant" ? getAudioSampleFromCbr({
    bitrateInKbit,
    data,
    initialOffset,
    layer,
    sampleRate,
    samplesPerFrame,
    state
  }) : getAudioSampleFromVbr({
    data,
    info: bitrateInfo,
    mp3Info: state.mp3.getMp3Info(),
    position: initialOffset
  });
  const { audioSample, timeInSeconds, durationInSeconds } = sample;
  // Record the sample position so later seeks can reuse it as a hint.
  state.mp3.audioSamples.addSample({
    timeInSeconds,
    offset: initialOffset,
    durationInSeconds
  });
  await state.callbacks.onAudioSample(0, audioSample);
};
|
|
12018
|
+
|
|
12019
|
+
// src/containers/mp3/seek/wait-until-syncword.ts
// Advances the iterator until it sits directly before an MPEG frame syncword
// (0xFF followed by the top three bits set), validated by checking that a
// full packet header parses here and in the following frame.
var discardUntilSyncword = ({
  iterator
}) => {
  const SYNC_MASK = 224;
  for (;;) {
    const firstByte = iterator.getUint8();
    if (firstByte !== 255) {
      continue;
    }
    const secondByte = iterator.getUint8();
    if ((secondByte & SYNC_MASK) !== SYNC_MASK) {
      continue;
    }
    // Rewind to the start of the candidate header and validate it.
    iterator.counter.decrement(2);
    if (isMp3PacketHeaderHereAndInNext(iterator)) {
      break;
    }
    // False positive - step over the two bytes and keep scanning.
    iterator.counter.increment(2);
  }
};
|
|
11267
12041
|
|
|
@@ -11271,6 +12045,13 @@ var parseMp3 = async (state) => {
|
|
|
11271
12045
|
if (iterator.bytesRemaining() < 3) {
|
|
11272
12046
|
return null;
|
|
11273
12047
|
}
|
|
12048
|
+
if (state.mediaSection.isCurrentByteInMediaSection(iterator) === "in-section") {
|
|
12049
|
+
discardUntilSyncword({ iterator });
|
|
12050
|
+
await parseMpegHeader({
|
|
12051
|
+
state
|
|
12052
|
+
});
|
|
12053
|
+
return null;
|
|
12054
|
+
}
|
|
11274
12055
|
const { returnToCheckpoint } = iterator.startCheckpoint();
|
|
11275
12056
|
const bytes = iterator.getSlice(3);
|
|
11276
12057
|
returnToCheckpoint();
|
|
@@ -11302,6 +12083,7 @@ var isMoviAtom = (iterator, ckId) => {
|
|
|
11302
12083
|
};
|
|
11303
12084
|
|
|
11304
12085
|
// src/containers/riff/parse-avih.ts
|
|
12086
|
+
var AVIF_HAS_INDEX = 16;
|
|
11305
12087
|
var parseAvih = ({
|
|
11306
12088
|
iterator,
|
|
11307
12089
|
size
|
|
@@ -11317,10 +12099,12 @@ var parseAvih = ({
|
|
|
11317
12099
|
const suggestedBufferSize = iterator.getUint32Le();
|
|
11318
12100
|
const width = iterator.getUint32Le();
|
|
11319
12101
|
const height = iterator.getUint32Le();
|
|
12102
|
+
const hasIndex = (flags & AVIF_HAS_INDEX) !== 0;
|
|
11320
12103
|
iterator.discard(16);
|
|
11321
12104
|
expectNoMoreBytes();
|
|
11322
12105
|
return {
|
|
11323
12106
|
type: "avih-box",
|
|
12107
|
+
hasIndex,
|
|
11324
12108
|
microSecPerFrame: dwMicroSecPerFrame,
|
|
11325
12109
|
maxBytesPerSecond: dwMaxBytesPerSec,
|
|
11326
12110
|
paddingGranularity,
|
|
@@ -11334,6 +12118,54 @@ var parseAvih = ({
|
|
|
11334
12118
|
};
|
|
11335
12119
|
};
|
|
11336
12120
|
|
|
12121
|
+
// src/containers/riff/parse-idx1.ts
var AVIIF_KEYFRAME = 16;
// Parses the AVI `idx1` index chunk. One entry is collected per keyframe,
// together with a snapshot of how many samples each track had emitted up to
// that point, so that seeking can restore per-track sample counters.
// Also remembers the index of the last track that produced a video chunk.
var parseIdx1 = ({
  iterator,
  size
}) => {
  const box = iterator.startBox(size);
  const startOffset = iterator.counter.getOffset();
  const entries = [];
  const sampleCounts = {};
  let videoTrackIndex = null;
  while (iterator.counter.getOffset() < startOffset + size) {
    // Each index record is 16 bytes: id, flags, offset, size.
    const chunkId = iterator.getByteString(4, false);
    const flags = iterator.getUint32Le();
    const chunkOffset = iterator.getUint32Le();
    const chunkSize = iterator.getUint32Le();
    // Chunk ids look like "00dc" (video) or "01wb" (audio).
    const parsed = chunkId.match(/^([0-9]{2})(wb|dc)$/);
    if (chunkId.endsWith("dc")) {
      videoTrackIndex = parsed ? parseInt(parsed[1], 10) : null;
    }
    const trackId = parsed ? parseInt(parsed[1], 10) : null;
    if (trackId === null) {
      continue;
    }
    if (!sampleCounts[trackId]) {
      sampleCounts[trackId] = 0;
    }
    if ((flags & AVIIF_KEYFRAME) !== 0) {
      entries.push({
        flags,
        id: chunkId,
        offset: chunkOffset,
        size: chunkSize,
        // Snapshot taken before counting this chunk itself.
        sampleCounts: { ...sampleCounts }
      });
    }
    sampleCounts[trackId]++;
  }
  box.expectNoMoreBytes();
  return {
    type: "idx1-box",
    entries,
    videoTrackIndex
  };
};
|
|
12168
|
+
|
|
11337
12169
|
// src/containers/riff/parse-isft.ts
|
|
11338
12170
|
var parseIsft = ({
|
|
11339
12171
|
iterator,
|
|
@@ -11355,9 +12187,9 @@ var parseIsft = ({
|
|
|
11355
12187
|
// src/containers/riff/parse-list-box.ts
|
|
11356
12188
|
var parseListBox = async ({
|
|
11357
12189
|
size,
|
|
11358
|
-
|
|
12190
|
+
iterator,
|
|
12191
|
+
stateIfExpectingSideEffects
|
|
11359
12192
|
}) => {
|
|
11360
|
-
const { iterator } = state;
|
|
11361
12193
|
const counter = iterator.counter.getOffset();
|
|
11362
12194
|
const listType = iterator.getByteString(4, false);
|
|
11363
12195
|
if (listType === "movi") {
|
|
@@ -11366,10 +12198,16 @@ var parseListBox = async ({
|
|
|
11366
12198
|
const boxes = [];
|
|
11367
12199
|
const maxOffset = counter + size;
|
|
11368
12200
|
while (iterator.counter.getOffset() < maxOffset) {
|
|
11369
|
-
const box = await expectRiffBox(
|
|
12201
|
+
const box = await expectRiffBox({
|
|
12202
|
+
iterator,
|
|
12203
|
+
stateIfExpectingSideEffects
|
|
12204
|
+
});
|
|
11370
12205
|
if (box === null) {
|
|
11371
12206
|
throw new Error("Unexpected result");
|
|
11372
12207
|
}
|
|
12208
|
+
if (stateIfExpectingSideEffects) {
|
|
12209
|
+
await postProcessRiffBox(stateIfExpectingSideEffects, box);
|
|
12210
|
+
}
|
|
11373
12211
|
boxes.push(box);
|
|
11374
12212
|
}
|
|
11375
12213
|
return {
|
|
@@ -11511,11 +12349,15 @@ var parseStrh = ({
|
|
|
11511
12349
|
var parseRiffBox = ({
|
|
11512
12350
|
size,
|
|
11513
12351
|
id,
|
|
11514
|
-
|
|
12352
|
+
iterator,
|
|
12353
|
+
stateIfExpectingSideEffects
|
|
11515
12354
|
}) => {
|
|
11516
|
-
const { iterator } = state;
|
|
11517
12355
|
if (id === "LIST") {
|
|
11518
|
-
return parseListBox({
|
|
12356
|
+
return parseListBox({
|
|
12357
|
+
size,
|
|
12358
|
+
iterator,
|
|
12359
|
+
stateIfExpectingSideEffects
|
|
12360
|
+
});
|
|
11519
12361
|
}
|
|
11520
12362
|
if (id === "ISFT") {
|
|
11521
12363
|
return Promise.resolve(parseIsft({ iterator, size }));
|
|
@@ -11526,41 +12368,20 @@ var parseRiffBox = ({
|
|
|
11526
12368
|
if (id === "strh") {
|
|
11527
12369
|
return Promise.resolve(parseStrh({ iterator, size }));
|
|
11528
12370
|
}
|
|
11529
|
-
|
|
11530
|
-
|
|
11531
|
-
type: "riff-box",
|
|
11532
|
-
size,
|
|
11533
|
-
id
|
|
11534
|
-
};
|
|
11535
|
-
return Promise.resolve(box);
|
|
11536
|
-
};
|
|
11537
|
-
|
|
11538
|
-
// src/containers/riff/expect-riff-box.ts
|
|
11539
|
-
var expectRiffBox = async (state) => {
|
|
11540
|
-
const { iterator } = state;
|
|
11541
|
-
if (state.iterator.bytesRemaining() < 16) {
|
|
11542
|
-
return null;
|
|
11543
|
-
}
|
|
11544
|
-
const checkpoint = iterator.startCheckpoint();
|
|
11545
|
-
const ckId = iterator.getByteString(4, false);
|
|
11546
|
-
const ckSize = iterator.getUint32Le();
|
|
11547
|
-
if (isMoviAtom(iterator, ckId)) {
|
|
11548
|
-
iterator.discard(4);
|
|
11549
|
-
state.mediaSection.addMediaSection({
|
|
11550
|
-
start: iterator.counter.getOffset(),
|
|
11551
|
-
size: ckSize - 4
|
|
11552
|
-
});
|
|
11553
|
-
return null;
|
|
11554
|
-
}
|
|
11555
|
-
if (iterator.bytesRemaining() < ckSize) {
|
|
11556
|
-
checkpoint.returnToCheckpoint();
|
|
11557
|
-
return null;
|
|
12371
|
+
if (id === "idx1") {
|
|
12372
|
+
return Promise.resolve(parseIdx1({ iterator, size }));
|
|
11558
12373
|
}
|
|
11559
|
-
|
|
11560
|
-
|
|
11561
|
-
|
|
11562
|
-
|
|
11563
|
-
|
|
12374
|
+
iterator.discard(size);
|
|
12375
|
+
const box = {
|
|
12376
|
+
type: "riff-box",
|
|
12377
|
+
size,
|
|
12378
|
+
id
|
|
12379
|
+
};
|
|
12380
|
+
return Promise.resolve(box);
|
|
12381
|
+
};
|
|
12382
|
+
|
|
12383
|
+
// src/containers/riff/expect-riff-box.ts
|
|
12384
|
+
var postProcessRiffBox = async (state, box) => {
|
|
11564
12385
|
if (box.type === "strh-box") {
|
|
11565
12386
|
if (box.strf.type === "strf-box-audio" && state.onAudioTrack) {
|
|
11566
12387
|
const audioTrack = makeAviAudioTrack({
|
|
@@ -11590,6 +12411,41 @@ var expectRiffBox = async (state) => {
|
|
|
11590
12411
|
}
|
|
11591
12412
|
state.riff.incrementNextTrackIndex();
|
|
11592
12413
|
}
|
|
12414
|
+
};
|
|
12415
|
+
// Reads the next RIFF chunk header and parses the chunk body.
// Returns null when fewer bytes are buffered than the chunk needs (after
// rewinding to the checkpoint), or when the `movi` media section was entered
// instead of a parseable box.
var expectRiffBox = async ({
  iterator,
  stateIfExpectingSideEffects
}) => {
  if (iterator.bytesRemaining() < 16) {
    return null;
  }
  const checkpoint = iterator.startCheckpoint();
  const ckId = iterator.getByteString(4, false);
  const ckSize = iterator.getUint32Le();
  if (isMoviAtom(iterator, ckId)) {
    iterator.discard(4);
    if (!stateIfExpectingSideEffects) {
      throw new Error("No state if expecting side effects");
    }
    stateIfExpectingSideEffects.mediaSection.addMediaSection({
      start: iterator.counter.getOffset(),
      size: ckSize - 4
    });
    // If the file advertises an index, start fetching the idx1 chunk that
    // follows the movi section.
    if (riffHasIndex(stateIfExpectingSideEffects.structure.getRiffStructure())) {
      stateIfExpectingSideEffects.riff.lazyIdx1.triggerLoad(iterator.counter.getOffset() + ckSize - 4);
    }
    return null;
  }
  if (iterator.bytesRemaining() < ckSize) {
    checkpoint.returnToCheckpoint();
    return null;
  }
  return parseRiffBox({
    id: ckId,
    size: ckSize,
    iterator,
    stateIfExpectingSideEffects
  });
};
|
|
11595
12451
|
|
|
@@ -11612,13 +12468,13 @@ var handleChunk = async ({
|
|
|
11612
12468
|
ckSize
|
|
11613
12469
|
}) => {
|
|
11614
12470
|
const { iterator } = state;
|
|
11615
|
-
const offset = iterator.counter.getOffset();
|
|
12471
|
+
const offset = iterator.counter.getOffset() - 8;
|
|
11616
12472
|
const videoChunk = ckId.match(/^([0-9]{2})dc$/);
|
|
11617
12473
|
if (videoChunk) {
|
|
11618
12474
|
const trackId = parseInt(videoChunk[1], 10);
|
|
11619
12475
|
const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
|
|
11620
12476
|
const samplesPerSecond = strh.rate / strh.scale;
|
|
11621
|
-
const nthSample = state.
|
|
12477
|
+
const nthSample = state.riff.sampleCounter.getSamplesForTrack(trackId);
|
|
11622
12478
|
const timeInSec = nthSample / samplesPerSecond;
|
|
11623
12479
|
const timestamp = timeInSec;
|
|
11624
12480
|
const data = iterator.getSlice(ckSize);
|
|
@@ -11630,24 +12486,22 @@ var handleChunk = async ({
|
|
|
11630
12486
|
await state.riff.onProfile({ pps: ppsProfile, sps: avcProfile });
|
|
11631
12487
|
state.callbacks.tracks.setIsDone(state.logLevel);
|
|
11632
12488
|
}
|
|
11633
|
-
|
|
11634
|
-
|
|
11635
|
-
|
|
11636
|
-
|
|
11637
|
-
|
|
11638
|
-
|
|
11639
|
-
|
|
11640
|
-
|
|
11641
|
-
|
|
11642
|
-
|
|
11643
|
-
|
|
11644
|
-
|
|
11645
|
-
|
|
11646
|
-
},
|
|
11647
|
-
timescale: 1
|
|
11648
|
-
}),
|
|
11649
|
-
callbacks: state.callbacks
|
|
12489
|
+
const videoSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
12490
|
+
sample: {
|
|
12491
|
+
cts: timestamp,
|
|
12492
|
+
dts: timestamp,
|
|
12493
|
+
data,
|
|
12494
|
+
duration: undefined,
|
|
12495
|
+
timestamp,
|
|
12496
|
+
trackId,
|
|
12497
|
+
type: keyOrDelta,
|
|
12498
|
+
offset,
|
|
12499
|
+
timescale: samplesPerSecond
|
|
12500
|
+
},
|
|
12501
|
+
timescale: 1
|
|
11650
12502
|
});
|
|
12503
|
+
state.riff.sampleCounter.onVideoSample(trackId, videoSample);
|
|
12504
|
+
await state.callbacks.onVideoSample(trackId, videoSample);
|
|
11651
12505
|
return;
|
|
11652
12506
|
}
|
|
11653
12507
|
const audioChunk = ckId.match(/^([0-9]{2})wb$/);
|
|
@@ -11655,28 +12509,26 @@ var handleChunk = async ({
|
|
|
11655
12509
|
const trackId = parseInt(audioChunk[1], 10);
|
|
11656
12510
|
const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
|
|
11657
12511
|
const samplesPerSecond = strh.rate / strh.scale;
|
|
11658
|
-
const nthSample = state.
|
|
12512
|
+
const nthSample = state.riff.sampleCounter.getSamplesForTrack(trackId);
|
|
11659
12513
|
const timeInSec = nthSample / samplesPerSecond;
|
|
11660
12514
|
const timestamp = timeInSec;
|
|
11661
12515
|
const data = iterator.getSlice(ckSize);
|
|
11662
|
-
|
|
11663
|
-
|
|
11664
|
-
|
|
11665
|
-
|
|
11666
|
-
|
|
11667
|
-
|
|
11668
|
-
|
|
11669
|
-
|
|
11670
|
-
|
|
11671
|
-
|
|
11672
|
-
|
|
11673
|
-
|
|
11674
|
-
|
|
11675
|
-
},
|
|
11676
|
-
timescale: 1
|
|
11677
|
-
}),
|
|
11678
|
-
callbacks: state.callbacks
|
|
12516
|
+
const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
12517
|
+
sample: {
|
|
12518
|
+
cts: timestamp,
|
|
12519
|
+
dts: timestamp,
|
|
12520
|
+
data,
|
|
12521
|
+
duration: undefined,
|
|
12522
|
+
timestamp,
|
|
12523
|
+
trackId,
|
|
12524
|
+
type: "key",
|
|
12525
|
+
offset,
|
|
12526
|
+
timescale: samplesPerSecond
|
|
12527
|
+
},
|
|
12528
|
+
timescale: 1
|
|
11679
12529
|
});
|
|
12530
|
+
state.riff.sampleCounter.onAudioSample(trackId, audioSample);
|
|
12531
|
+
await state.callbacks.onAudioSample(trackId, audioSample);
|
|
11680
12532
|
}
|
|
11681
12533
|
};
|
|
11682
12534
|
var parseMovi = async ({
|
|
@@ -11733,8 +12585,12 @@ var parseRiffBody = async (state) => {
|
|
|
11733
12585
|
await parseMediaSection(state);
|
|
11734
12586
|
return null;
|
|
11735
12587
|
}
|
|
11736
|
-
const box = await expectRiffBox(
|
|
12588
|
+
const box = await expectRiffBox({
|
|
12589
|
+
iterator: state.iterator,
|
|
12590
|
+
stateIfExpectingSideEffects: state
|
|
12591
|
+
});
|
|
11737
12592
|
if (box !== null) {
|
|
12593
|
+
await postProcessRiffBox(state, box);
|
|
11738
12594
|
const structure = state.structure.getRiffStructure();
|
|
11739
12595
|
structure.boxes.push(box);
|
|
11740
12596
|
}
|
|
@@ -12116,14 +12972,11 @@ var handleAacPacket = async ({
|
|
|
12116
12972
|
offset,
|
|
12117
12973
|
timescale: MPEG_TIMESCALE
|
|
12118
12974
|
};
|
|
12119
|
-
|
|
12120
|
-
|
|
12121
|
-
|
|
12122
|
-
sample,
|
|
12123
|
-
timescale: MPEG_TIMESCALE
|
|
12124
|
-
}),
|
|
12125
|
-
callbacks: sampleCallbacks
|
|
12975
|
+
const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
12976
|
+
sample,
|
|
12977
|
+
timescale: MPEG_TIMESCALE
|
|
12126
12978
|
});
|
|
12979
|
+
await sampleCallbacks.onAudioSample(programId, audioSample);
|
|
12127
12980
|
transportStream.lastEmittedSample.setLastEmittedSample(sample);
|
|
12128
12981
|
};
|
|
12129
12982
|
|
|
@@ -12703,24 +13556,21 @@ var parseMediaSection2 = async ({
|
|
|
12703
13556
|
const duration2 = toRead / (fmtBox.sampleRate * fmtBox.blockAlign);
|
|
12704
13557
|
const timestamp = (offset - videoSection.start) / (fmtBox.sampleRate * fmtBox.blockAlign);
|
|
12705
13558
|
const data = iterator.getSlice(toRead);
|
|
12706
|
-
|
|
12707
|
-
|
|
12708
|
-
|
|
12709
|
-
|
|
12710
|
-
|
|
12711
|
-
|
|
12712
|
-
|
|
12713
|
-
|
|
12714
|
-
|
|
12715
|
-
|
|
12716
|
-
|
|
12717
|
-
|
|
12718
|
-
|
|
12719
|
-
},
|
|
12720
|
-
timescale: 1
|
|
12721
|
-
}),
|
|
12722
|
-
callbacks: state.callbacks
|
|
13559
|
+
const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
|
|
13560
|
+
sample: {
|
|
13561
|
+
cts: timestamp,
|
|
13562
|
+
dts: timestamp,
|
|
13563
|
+
data,
|
|
13564
|
+
duration: duration2,
|
|
13565
|
+
timestamp,
|
|
13566
|
+
trackId: 0,
|
|
13567
|
+
type: "key",
|
|
13568
|
+
offset,
|
|
13569
|
+
timescale: 1e6
|
|
13570
|
+
},
|
|
13571
|
+
timescale: 1
|
|
12723
13572
|
});
|
|
13573
|
+
await state.callbacks.onAudioSample(0, audioSample);
|
|
12724
13574
|
return null;
|
|
12725
13575
|
};
|
|
12726
13576
|
|
|
@@ -13048,11 +13898,7 @@ var postprocessEbml = async ({
|
|
|
13048
13898
|
if (ebml.type === "Block" || ebml.type === "SimpleBlock") {
|
|
13049
13899
|
const sample = getSampleFromBlock(ebml, webmState, offset, structureState2);
|
|
13050
13900
|
if (sample.type === "video-sample") {
|
|
13051
|
-
await
|
|
13052
|
-
trackId: sample.videoSample.trackId,
|
|
13053
|
-
videoSample: sample.videoSample,
|
|
13054
|
-
callbacks
|
|
13055
|
-
});
|
|
13901
|
+
await callbacks.onVideoSample(sample.videoSample.trackId, sample.videoSample);
|
|
13056
13902
|
return {
|
|
13057
13903
|
type: "Block",
|
|
13058
13904
|
value: new Uint8Array([]),
|
|
@@ -13060,11 +13906,7 @@ var postprocessEbml = async ({
|
|
|
13060
13906
|
};
|
|
13061
13907
|
}
|
|
13062
13908
|
if (sample.type === "audio-sample") {
|
|
13063
|
-
await
|
|
13064
|
-
trackId: sample.audioSample.trackId,
|
|
13065
|
-
audioSample: sample.audioSample,
|
|
13066
|
-
callbacks
|
|
13067
|
-
});
|
|
13909
|
+
await callbacks.onAudioSample(sample.audioSample.trackId, sample.audioSample);
|
|
13068
13910
|
return {
|
|
13069
13911
|
type: "Block",
|
|
13070
13912
|
value: new Uint8Array([]),
|
|
@@ -13091,11 +13933,7 @@ var postprocessEbml = async ({
|
|
|
13091
13933
|
...sample.partialVideoSample,
|
|
13092
13934
|
type: hasReferenceBlock ? "delta" : "key"
|
|
13093
13935
|
};
|
|
13094
|
-
await
|
|
13095
|
-
trackId: sample.partialVideoSample.trackId,
|
|
13096
|
-
videoSample: completeFrame,
|
|
13097
|
-
callbacks
|
|
13098
|
-
});
|
|
13936
|
+
await callbacks.onVideoSample(sample.partialVideoSample.trackId, completeFrame);
|
|
13099
13937
|
}
|
|
13100
13938
|
return {
|
|
13101
13939
|
type: "BlockGroup",
|
|
@@ -13521,6 +14359,7 @@ var parseLoop = async ({
|
|
|
13521
14359
|
if (skip !== null) {
|
|
13522
14360
|
state.increaseSkippedBytes(skip.skipTo - state.iterator.counter.getOffset());
|
|
13523
14361
|
if (skip.skipTo === state.contentLength) {
|
|
14362
|
+
state.iterator.discard(skip.skipTo - state.iterator.counter.getOffset());
|
|
13524
14363
|
Log.verbose(state.logLevel, "Skipped to end of file, not fetching.");
|
|
13525
14364
|
break;
|
|
13526
14365
|
}
|
|
@@ -13567,6 +14406,19 @@ var parseLoop = async ({
|
|
|
13567
14406
|
iterationWithThisOffset = 0;
|
|
13568
14407
|
}
|
|
13569
14408
|
}
|
|
14409
|
+
state.samplesObserved.setLastSampleObserved();
|
|
14410
|
+
if (state.controller._internals.seekSignal.getSeek()) {
|
|
14411
|
+
Log.verbose(state.logLevel, "Reached end of samples, but there is a pending seek. Trying to seek...");
|
|
14412
|
+
await workOnSeekRequest(getWorkOnSeekRequestOptions(state));
|
|
14413
|
+
if (state.controller._internals.seekSignal.getSeek()) {
|
|
14414
|
+
throw new Error("Reached the end of the file even though a seek was requested. This is likely a bug in the parser. You can report this at https://remotion.dev/report and we will fix it as soon as possible.");
|
|
14415
|
+
}
|
|
14416
|
+
await parseLoop({
|
|
14417
|
+
onError,
|
|
14418
|
+
throttledState,
|
|
14419
|
+
state
|
|
14420
|
+
});
|
|
14421
|
+
}
|
|
13570
14422
|
};
|
|
13571
14423
|
|
|
13572
14424
|
// src/print-timings.ts
|
|
@@ -13617,6 +14469,22 @@ var setSeekingHints = ({
|
|
|
13617
14469
|
setSeekingHintsForWebm({ hints, state });
|
|
13618
14470
|
return;
|
|
13619
14471
|
}
|
|
14472
|
+
if (hints.type === "flac-seeking-hints") {
|
|
14473
|
+
setSeekingHintsForFlac({ hints, state });
|
|
14474
|
+
return;
|
|
14475
|
+
}
|
|
14476
|
+
if (hints.type === "riff-seeking-hints") {
|
|
14477
|
+
setSeekingHintsForRiff({ hints, state });
|
|
14478
|
+
return;
|
|
14479
|
+
}
|
|
14480
|
+
if (hints.type === "mp3-seeking-hints") {
|
|
14481
|
+
setSeekingHintsForMp3({ hints, state });
|
|
14482
|
+
return;
|
|
14483
|
+
}
|
|
14484
|
+
if (hints.type === "aac-seeking-hints") {
|
|
14485
|
+
setSeekingHintsForAac();
|
|
14486
|
+
return;
|
|
14487
|
+
}
|
|
13620
14488
|
throw new Error(`Unknown seeking hints type: ${hints}`);
|
|
13621
14489
|
};
|
|
13622
14490
|
|
|
@@ -13659,18 +14527,39 @@ var getFieldsFromCallback = ({
|
|
|
13659
14527
|
return newFields;
|
|
13660
14528
|
};
|
|
13661
14529
|
|
|
14530
|
+
// src/state/audio-sample-map.ts
// Keeps an append-only list of observed audio sample positions, deduplicated
// by byte offset. Can be replaced wholesale when seeking hints are applied.
var audioSampleMapState = () => {
  let samples = [];
  return {
    addSample: (entry) => {
      const alreadyKnown = samples.some((existing) => existing.offset === entry.offset);
      if (!alreadyKnown) {
        samples.push(entry);
      }
    },
    getSamples: () => samples,
    setFromSeekingHints: (hinted) => {
      samples = hinted;
    }
  };
};
|
|
14547
|
+
|
|
13662
14548
|
// src/state/aac-state.ts
// Per-parse state for AAC files: an ordered sample table (deduplicated by
// byte offset) plus the shared audio-sample seeking map.
var aacState = () => {
  const samples = [];
  const audioSamples = audioSampleMapState();
  return {
    // Registers a sample; idempotent - if a sample at this offset was seen
    // before, the existing entry is returned instead of adding a duplicate.
    addSample: ({ offset, size }) => {
      const existing = samples.find((s) => s.offset === offset);
      if (existing !== undefined) {
        return existing;
      }
      const entry = { offset, index: samples.length, size };
      samples.push(entry);
      return entry;
    },
    getSamples: () => samples,
    audioSamples
  };
};
|
|
13676
14565
|
|
|
@@ -13723,11 +14612,13 @@ var emittedState = () => {
|
|
|
13723
14612
|
// src/state/flac-state.ts
// Per-parse state for FLAC files: the chosen blocking-bit strategy and the
// shared audio-sample seeking map.
var flacState = () => {
  let strategyForBlockingBit;
  const audioSamples = audioSampleMapState();
  return {
    setBlockingBitStrategy: (strategy) => {
      strategyForBlockingBit = strategy;
    },
    getBlockingBitStrategy: () => strategyForBlockingBit,
    audioSamples
  };
};
|
|
13733
14624
|
|
|
@@ -13837,7 +14728,8 @@ var getMfraSeekingBox = async ({
|
|
|
13837
14728
|
iterator: mfraAtom,
|
|
13838
14729
|
logLevel,
|
|
13839
14730
|
size: parentSize - 8,
|
|
13840
|
-
onlyIfMoovAtomExpected: null
|
|
14731
|
+
onlyIfMoovAtomExpected: null,
|
|
14732
|
+
contentLength
|
|
13841
14733
|
});
|
|
13842
14734
|
};
|
|
13843
14735
|
|
|
@@ -13931,8 +14823,8 @@ var keyframesState = () => {
|
|
|
13931
14823
|
const getKeyframes2 = () => {
|
|
13932
14824
|
return keyframes;
|
|
13933
14825
|
};
|
|
13934
|
-
const setFromSeekingHints = (
|
|
13935
|
-
for (const keyframe of
|
|
14826
|
+
const setFromSeekingHints = (keyframesFromHints) => {
|
|
14827
|
+
for (const keyframe of keyframesFromHints) {
|
|
13936
14828
|
addKeyframe(keyframe);
|
|
13937
14829
|
}
|
|
13938
14830
|
};
|
|
@@ -14218,6 +15110,9 @@ var lazyCuesFetch = ({
|
|
|
14218
15110
|
let sOffset = null;
|
|
14219
15111
|
let result = null;
|
|
14220
15112
|
const triggerLoad = (position, segmentOffset) => {
|
|
15113
|
+
if (result) {
|
|
15114
|
+
return Promise.resolve(result);
|
|
15115
|
+
}
|
|
14221
15116
|
if (prom) {
|
|
14222
15117
|
return prom;
|
|
14223
15118
|
}
|
|
@@ -14243,6 +15138,15 @@ var lazyCuesFetch = ({
|
|
|
14243
15138
|
if (!prom) {
|
|
14244
15139
|
return null;
|
|
14245
15140
|
}
|
|
15141
|
+
if (result) {
|
|
15142
|
+
if (!sOffset) {
|
|
15143
|
+
throw new Error("Segment offset not set");
|
|
15144
|
+
}
|
|
15145
|
+
return {
|
|
15146
|
+
cues: result,
|
|
15147
|
+
segmentOffset: sOffset
|
|
15148
|
+
};
|
|
15149
|
+
}
|
|
14246
15150
|
const cues = await prom;
|
|
14247
15151
|
if (!cues) {
|
|
14248
15152
|
return null;
|
|
@@ -14257,7 +15161,7 @@ var lazyCuesFetch = ({
|
|
|
14257
15161
|
};
|
|
14258
15162
|
const getIfAlreadyLoaded = () => {
|
|
14259
15163
|
if (result) {
|
|
14260
|
-
if (
|
|
15164
|
+
if (sOffset === null) {
|
|
14261
15165
|
throw new Error("Segment offset not set");
|
|
14262
15166
|
}
|
|
14263
15167
|
return {
|
|
@@ -14405,21 +15309,203 @@ var webmState = ({
|
|
|
14405
15309
|
// src/state/mp3.ts
// Per-parse state for MP3 files: header info, CBR/VBR bitrate info and the
// shared audio-sample seeking map.
var makeMp3State = () => {
  let mp3Info = null;
  let bitrateInfo = null;
  const audioSamples = audioSampleMapState();
  return {
    getMp3Info: () => mp3Info,
    setMp3Info: (info) => {
      mp3Info = info;
    },
    getMp3BitrateInfo: () => bitrateInfo,
    setMp3BitrateInfo: (info) => {
      bitrateInfo = info;
    },
    audioSamples
  };
};
|
|
15326
|
+
|
|
15327
|
+
// src/containers/riff/seek/fetch-idx1.ts
|
|
15328
|
+
var fetchIdx1 = async ({
|
|
15329
|
+
src,
|
|
15330
|
+
readerInterface,
|
|
15331
|
+
controller,
|
|
15332
|
+
position,
|
|
15333
|
+
logLevel
|
|
15334
|
+
}) => {
|
|
15335
|
+
Log.verbose(logLevel, "Making request to fetch idx1 from ", src, "position", position);
|
|
15336
|
+
const result = await readerInterface.read({
|
|
15337
|
+
controller,
|
|
15338
|
+
range: position,
|
|
15339
|
+
src
|
|
15340
|
+
});
|
|
15341
|
+
const iterator = getArrayBufferIterator(new Uint8Array, Infinity);
|
|
15342
|
+
while (true) {
|
|
15343
|
+
const res = await result.reader.reader.read();
|
|
15344
|
+
if (res.value) {
|
|
15345
|
+
iterator.addData(res.value);
|
|
15346
|
+
}
|
|
15347
|
+
if (res.done) {
|
|
15348
|
+
break;
|
|
15349
|
+
}
|
|
15350
|
+
}
|
|
15351
|
+
const box = await expectRiffBox({
|
|
15352
|
+
iterator,
|
|
15353
|
+
stateIfExpectingSideEffects: null
|
|
15354
|
+
});
|
|
15355
|
+
iterator.destroy();
|
|
15356
|
+
if (box === null || box.type !== "idx1-box") {
|
|
15357
|
+
throw new Error("Expected idx1-box");
|
|
15358
|
+
}
|
|
15359
|
+
return {
|
|
15360
|
+
entries: box.entries.filter((entry) => entry.id.endsWith("dc")),
|
|
15361
|
+
videoTrackIndex: box.videoTrackIndex
|
|
15362
|
+
};
|
|
15363
|
+
};
|
|
15364
|
+
|
|
15365
|
+
// src/state/riff/lazy-idx1-fetch.ts
|
|
15366
|
+
var lazyIdx1Fetch = ({
|
|
15367
|
+
controller,
|
|
15368
|
+
logLevel,
|
|
15369
|
+
readerInterface,
|
|
15370
|
+
src
|
|
15371
|
+
}) => {
|
|
15372
|
+
let prom = null;
|
|
15373
|
+
let result = null;
|
|
15374
|
+
const triggerLoad = (position) => {
|
|
15375
|
+
if (result) {
|
|
15376
|
+
return Promise.resolve(result);
|
|
15377
|
+
}
|
|
15378
|
+
if (prom) {
|
|
15379
|
+
return prom;
|
|
15380
|
+
}
|
|
15381
|
+
prom = fetchIdx1({
|
|
15382
|
+
controller,
|
|
15383
|
+
logLevel,
|
|
15384
|
+
position,
|
|
15385
|
+
readerInterface,
|
|
15386
|
+
src
|
|
15387
|
+
}).then((entries) => {
|
|
15388
|
+
prom = null;
|
|
15389
|
+
result = entries;
|
|
15390
|
+
return entries;
|
|
15391
|
+
});
|
|
15392
|
+
return prom;
|
|
15393
|
+
};
|
|
15394
|
+
const getLoadedIdx1 = async () => {
|
|
15395
|
+
if (!prom) {
|
|
15396
|
+
return null;
|
|
15397
|
+
}
|
|
15398
|
+
const entries = await prom;
|
|
15399
|
+
return entries;
|
|
15400
|
+
};
|
|
15401
|
+
const getIfAlreadyLoaded = () => {
|
|
15402
|
+
if (result) {
|
|
15403
|
+
return result;
|
|
15404
|
+
}
|
|
15405
|
+
return null;
|
|
15406
|
+
};
|
|
15407
|
+
const setFromSeekingHints = (hints) => {
|
|
15408
|
+
if (hints.idx1Entries) {
|
|
15409
|
+
result = hints.idx1Entries;
|
|
15410
|
+
}
|
|
15411
|
+
};
|
|
15412
|
+
const waitForLoaded = () => {
|
|
15413
|
+
if (result) {
|
|
15414
|
+
return Promise.resolve(result);
|
|
15415
|
+
}
|
|
15416
|
+
if (prom) {
|
|
15417
|
+
return prom;
|
|
15418
|
+
}
|
|
15419
|
+
return Promise.resolve(null);
|
|
15420
|
+
};
|
|
15421
|
+
return {
|
|
15422
|
+
triggerLoad,
|
|
15423
|
+
getLoadedIdx1,
|
|
15424
|
+
getIfAlreadyLoaded,
|
|
15425
|
+
setFromSeekingHints,
|
|
15426
|
+
waitForLoaded
|
|
15427
|
+
};
|
|
15428
|
+
};
|
|
15429
|
+
|
|
15430
|
+
// src/state/riff/riff-keyframes.ts
|
|
15431
|
+
var riffKeyframesState = () => {
|
|
15432
|
+
const keyframes = [];
|
|
15433
|
+
const addKeyframe = (keyframe) => {
|
|
15434
|
+
if (keyframes.find((k) => k.positionInBytes === keyframe.positionInBytes)) {
|
|
15435
|
+
return;
|
|
15436
|
+
}
|
|
15437
|
+
keyframes.push(keyframe);
|
|
15438
|
+
};
|
|
15439
|
+
const getKeyframes2 = () => {
|
|
15440
|
+
return keyframes;
|
|
15441
|
+
};
|
|
15442
|
+
const setFromSeekingHints = (keyframesFromHints) => {
|
|
15443
|
+
for (const keyframe of keyframesFromHints) {
|
|
15444
|
+
addKeyframe(keyframe);
|
|
15445
|
+
}
|
|
15446
|
+
};
|
|
15447
|
+
return {
|
|
15448
|
+
addKeyframe,
|
|
15449
|
+
getKeyframes: getKeyframes2,
|
|
15450
|
+
setFromSeekingHints
|
|
15451
|
+
};
|
|
15452
|
+
};
|
|
15453
|
+
|
|
15454
|
+
// src/state/riff/sample-counter.ts
|
|
15455
|
+
var riffSampleCounter = () => {
|
|
15456
|
+
const samplesForTrack = {};
|
|
15457
|
+
const riffKeys = riffKeyframesState();
|
|
15458
|
+
const onAudioSample = (trackId, audioSample) => {
|
|
15459
|
+
if (typeof samplesForTrack[trackId] === "undefined") {
|
|
15460
|
+
samplesForTrack[trackId] = 0;
|
|
15461
|
+
}
|
|
15462
|
+
if (audioSample.data.length > 0) {
|
|
15463
|
+
samplesForTrack[trackId]++;
|
|
15464
|
+
}
|
|
15465
|
+
samplesForTrack[trackId]++;
|
|
15466
|
+
};
|
|
15467
|
+
const onVideoSample = (trackId, videoSample) => {
|
|
15468
|
+
if (typeof samplesForTrack[trackId] === "undefined") {
|
|
15469
|
+
samplesForTrack[trackId] = 0;
|
|
15470
|
+
}
|
|
15471
|
+
if (videoSample.type === "key") {
|
|
15472
|
+
riffKeys.addKeyframe({
|
|
15473
|
+
trackId,
|
|
15474
|
+
decodingTimeInSeconds: videoSample.dts / videoSample.timescale,
|
|
15475
|
+
positionInBytes: videoSample.offset,
|
|
15476
|
+
presentationTimeInSeconds: videoSample.cts / videoSample.timescale,
|
|
15477
|
+
sizeInBytes: videoSample.data.length,
|
|
15478
|
+
sampleCounts: { ...samplesForTrack }
|
|
15479
|
+
});
|
|
15480
|
+
}
|
|
15481
|
+
if (videoSample.data.length > 0) {
|
|
15482
|
+
samplesForTrack[trackId]++;
|
|
15483
|
+
}
|
|
15484
|
+
};
|
|
15485
|
+
const getSamplesForTrack = (trackId) => {
|
|
15486
|
+
return samplesForTrack[trackId] ?? 0;
|
|
15487
|
+
};
|
|
15488
|
+
const setSamplesFromSeek = (samples) => {
|
|
15489
|
+
for (const trackId in samples) {
|
|
15490
|
+
samplesForTrack[trackId] = samples[trackId];
|
|
14417
15491
|
}
|
|
14418
15492
|
};
|
|
15493
|
+
return {
|
|
15494
|
+
onAudioSample,
|
|
15495
|
+
onVideoSample,
|
|
15496
|
+
getSamplesForTrack,
|
|
15497
|
+
setSamplesFromSeek,
|
|
15498
|
+
riffKeys
|
|
15499
|
+
};
|
|
14419
15500
|
};
|
|
14420
15501
|
|
|
14421
15502
|
// src/state/riff.ts
|
|
14422
|
-
var riffSpecificState = (
|
|
15503
|
+
var riffSpecificState = ({
|
|
15504
|
+
controller,
|
|
15505
|
+
logLevel,
|
|
15506
|
+
readerInterface,
|
|
15507
|
+
src
|
|
15508
|
+
}) => {
|
|
14423
15509
|
let avcProfile = null;
|
|
14424
15510
|
let nextTrackIndex = 0;
|
|
14425
15511
|
const profileCallbacks = [];
|
|
@@ -14433,6 +15519,13 @@ var riffSpecificState = () => {
|
|
|
14433
15519
|
}
|
|
14434
15520
|
profileCallbacks.length = 0;
|
|
14435
15521
|
};
|
|
15522
|
+
const lazyIdx1 = lazyIdx1Fetch({
|
|
15523
|
+
controller,
|
|
15524
|
+
logLevel,
|
|
15525
|
+
readerInterface,
|
|
15526
|
+
src
|
|
15527
|
+
});
|
|
15528
|
+
const sampleCounter = riffSampleCounter();
|
|
14436
15529
|
return {
|
|
14437
15530
|
getAvcProfile: () => {
|
|
14438
15531
|
return avcProfile;
|
|
@@ -14444,7 +15537,9 @@ var riffSpecificState = () => {
|
|
|
14444
15537
|
},
|
|
14445
15538
|
incrementNextTrackIndex: () => {
|
|
14446
15539
|
nextTrackIndex++;
|
|
14447
|
-
}
|
|
15540
|
+
},
|
|
15541
|
+
lazyIdx1,
|
|
15542
|
+
sampleCounter
|
|
14448
15543
|
};
|
|
14449
15544
|
};
|
|
14450
15545
|
|
|
@@ -14456,7 +15551,7 @@ var sampleCallback = ({
|
|
|
14456
15551
|
fields,
|
|
14457
15552
|
keyframes,
|
|
14458
15553
|
emittedFields,
|
|
14459
|
-
|
|
15554
|
+
samplesObserved,
|
|
14460
15555
|
structure,
|
|
14461
15556
|
src,
|
|
14462
15557
|
seekSignal,
|
|
@@ -14473,7 +15568,6 @@ var sampleCallback = ({
|
|
|
14473
15568
|
structure
|
|
14474
15569
|
});
|
|
14475
15570
|
const tracksState = makeTracksSectionState(canSkipTracksState, src);
|
|
14476
|
-
const samplesForTrack = {};
|
|
14477
15571
|
return {
|
|
14478
15572
|
registerVideoSampleCallback: async (id, callback) => {
|
|
14479
15573
|
if (callback === null) {
|
|
@@ -14490,12 +15584,8 @@ var sampleCallback = ({
|
|
|
14490
15584
|
if (controller._internals.signal.aborted) {
|
|
14491
15585
|
throw new Error("Aborted");
|
|
14492
15586
|
}
|
|
14493
|
-
if (typeof samplesForTrack[trackId] === "undefined") {
|
|
14494
|
-
samplesForTrack[trackId] = 0;
|
|
14495
|
-
}
|
|
14496
15587
|
const callback = audioSampleCallbacks[trackId];
|
|
14497
15588
|
if (audioSample.data.length > 0) {
|
|
14498
|
-
samplesForTrack[trackId]++;
|
|
14499
15589
|
if (callback) {
|
|
14500
15590
|
if (seekSignal.getSeek()) {
|
|
14501
15591
|
Log.trace(logLevel, "Not emitting sample because seek is processing");
|
|
@@ -14505,21 +15595,14 @@ var sampleCallback = ({
|
|
|
14505
15595
|
}
|
|
14506
15596
|
}
|
|
14507
15597
|
if (needsToIterateOverSamples({ emittedFields, fields })) {
|
|
14508
|
-
|
|
15598
|
+
samplesObserved.addAudioSample(audioSample);
|
|
14509
15599
|
}
|
|
14510
15600
|
},
|
|
14511
|
-
getSamplesForTrack: (trackId) => {
|
|
14512
|
-
return samplesForTrack[trackId] ?? 0;
|
|
14513
|
-
},
|
|
14514
15601
|
onVideoSample: async (trackId, videoSample) => {
|
|
14515
15602
|
if (controller._internals.signal.aborted) {
|
|
14516
15603
|
throw new Error("Aborted");
|
|
14517
15604
|
}
|
|
14518
|
-
if (typeof samplesForTrack[trackId] === "undefined") {
|
|
14519
|
-
samplesForTrack[trackId] = 0;
|
|
14520
|
-
}
|
|
14521
15605
|
if (videoSample.data.length > 0) {
|
|
14522
|
-
samplesForTrack[trackId]++;
|
|
14523
15606
|
const callback = videoSampleCallbacks[trackId];
|
|
14524
15607
|
if (callback) {
|
|
14525
15608
|
if (seekSignal.getSeek()) {
|
|
@@ -14542,7 +15625,7 @@ var sampleCallback = ({
|
|
|
14542
15625
|
fields,
|
|
14543
15626
|
emittedFields
|
|
14544
15627
|
})) {
|
|
14545
|
-
|
|
15628
|
+
samplesObserved.addVideoSample(videoSample);
|
|
14546
15629
|
}
|
|
14547
15630
|
},
|
|
14548
15631
|
canSkipTracksState,
|
|
@@ -14571,6 +15654,7 @@ var samplesObservedState = () => {
|
|
|
14571
15654
|
let largestVideoSample;
|
|
14572
15655
|
let smallestAudioSample;
|
|
14573
15656
|
let largestAudioSample;
|
|
15657
|
+
let lastSampleObserved = false;
|
|
14574
15658
|
const videoSamples = new Map;
|
|
14575
15659
|
const audioSamples = new Map;
|
|
14576
15660
|
const getSlowVideoDurationInSeconds = () => {
|
|
@@ -14636,6 +15720,10 @@ var samplesObservedState = () => {
|
|
|
14636
15720
|
const videoSizesInBytes = Array.from(videoSamples.values()).reduce((acc, size) => acc + size, 0);
|
|
14637
15721
|
return videoSizesInBytes * 8 / videoDuration;
|
|
14638
15722
|
};
|
|
15723
|
+
const getLastSampleObserved = () => lastSampleObserved;
|
|
15724
|
+
const setLastSampleObserved = () => {
|
|
15725
|
+
lastSampleObserved = true;
|
|
15726
|
+
};
|
|
14639
15727
|
return {
|
|
14640
15728
|
addVideoSample,
|
|
14641
15729
|
addAudioSample,
|
|
@@ -14643,7 +15731,9 @@ var samplesObservedState = () => {
|
|
|
14643
15731
|
getFps: getFps2,
|
|
14644
15732
|
getSlowNumberOfFrames,
|
|
14645
15733
|
getAudioBitrate,
|
|
14646
|
-
getVideoBitrate
|
|
15734
|
+
getVideoBitrate,
|
|
15735
|
+
getLastSampleObserved,
|
|
15736
|
+
setLastSampleObserved
|
|
14647
15737
|
};
|
|
14648
15738
|
};
|
|
14649
15739
|
|
|
@@ -14779,8 +15869,8 @@ var makeParserState = ({
|
|
|
14779
15869
|
const structure = structureState();
|
|
14780
15870
|
const keyframes = keyframesState();
|
|
14781
15871
|
const emittedFields = emittedState();
|
|
14782
|
-
const
|
|
14783
|
-
const
|
|
15872
|
+
const samplesObserved = samplesObservedState();
|
|
15873
|
+
const mp3 = makeMp3State();
|
|
14784
15874
|
const images = imagesState();
|
|
14785
15875
|
const timings = timingsState();
|
|
14786
15876
|
const seekInfiniteLoop = seekInfiniteLoopDetectionState();
|
|
@@ -14800,7 +15890,7 @@ var makeParserState = ({
|
|
|
14800
15890
|
callbacks
|
|
14801
15891
|
});
|
|
14802
15892
|
return {
|
|
14803
|
-
riff: riffSpecificState(),
|
|
15893
|
+
riff: riffSpecificState({ controller, logLevel, readerInterface, src }),
|
|
14804
15894
|
transportStream: transportStreamState(),
|
|
14805
15895
|
webm: webmState({ controller, logLevel, readerInterface, src }),
|
|
14806
15896
|
iso: isoBaseMediaState({
|
|
@@ -14810,7 +15900,7 @@ var makeParserState = ({
|
|
|
14810
15900
|
src,
|
|
14811
15901
|
logLevel
|
|
14812
15902
|
}),
|
|
14813
|
-
|
|
15903
|
+
mp3,
|
|
14814
15904
|
aac: aacState(),
|
|
14815
15905
|
flac: flacState(),
|
|
14816
15906
|
m3u: m3uState(logLevel),
|
|
@@ -14822,7 +15912,7 @@ var makeParserState = ({
|
|
|
14822
15912
|
fields,
|
|
14823
15913
|
keyframes,
|
|
14824
15914
|
emittedFields,
|
|
14825
|
-
|
|
15915
|
+
samplesObserved,
|
|
14826
15916
|
structure,
|
|
14827
15917
|
src,
|
|
14828
15918
|
seekSignal: controller._internals.seekSignal,
|
|
@@ -14840,7 +15930,7 @@ var makeParserState = ({
|
|
|
14840
15930
|
onVideoTrack,
|
|
14841
15931
|
emittedFields,
|
|
14842
15932
|
fields,
|
|
14843
|
-
|
|
15933
|
+
samplesObserved,
|
|
14844
15934
|
contentLength,
|
|
14845
15935
|
images,
|
|
14846
15936
|
mediaSection: mediaSectionState(),
|
|
@@ -15004,7 +16094,13 @@ var internalParseMedia = async function({
|
|
|
15004
16094
|
mp4HeaderSegment: state.mp4HeaderSegment,
|
|
15005
16095
|
mediaSectionState: state.mediaSection,
|
|
15006
16096
|
isoState: state.iso,
|
|
15007
|
-
transportStream: state.transportStream
|
|
16097
|
+
transportStream: state.transportStream,
|
|
16098
|
+
flacState: state.flac,
|
|
16099
|
+
samplesObserved: state.samplesObserved,
|
|
16100
|
+
riffState: state.riff,
|
|
16101
|
+
mp3State: state.mp3,
|
|
16102
|
+
contentLength: state.contentLength,
|
|
16103
|
+
aacState: state.aac
|
|
15008
16104
|
})));
|
|
15009
16105
|
if (!hasAudioTrackHandlers && !hasVideoTrackHandlers && Object.values(state.fields).every((v) => !v) && mode === "query") {
|
|
15010
16106
|
Log.warn(logLevel, new Error("Warning - No `fields` and no `on*` callbacks were passed to `parseMedia()`. Specify the data you would like to retrieve."));
|