@remotion/media-parser 4.0.301 → 4.0.303
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/check-if-done.js +4 -0
- package/dist/containers/aac/parse-aac.js +1 -0
- package/dist/containers/avc/key.d.ts +1 -1
- package/dist/containers/avc/key.js +5 -1
- package/dist/containers/avc/max-buffer-size.d.ts +3 -0
- package/dist/containers/avc/max-buffer-size.js +40 -0
- package/dist/containers/avc/parse-avc.d.ts +6 -7
- package/dist/containers/avc/parse-avc.js +83 -7
- package/dist/containers/flac/parse-streaminfo.js +1 -0
- package/dist/containers/iso-base-media/base-media-box.d.ts +2 -1
- package/dist/containers/iso-base-media/elst.d.ts +19 -0
- package/dist/containers/iso-base-media/elst.js +33 -0
- package/dist/containers/iso-base-media/find-keyframe-before-time.d.ts +2 -1
- package/dist/containers/iso-base-media/find-keyframe-before-time.js +3 -3
- package/dist/containers/iso-base-media/get-moov-atom.js +1 -0
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +1 -0
- package/dist/containers/iso-base-media/get-seeking-byte.js +1 -0
- package/dist/containers/iso-base-media/make-track.d.ts +1 -1
- package/dist/containers/iso-base-media/make-track.js +4 -1
- package/dist/containers/iso-base-media/mdat/get-editlist.d.ts +5 -0
- package/dist/containers/iso-base-media/mdat/get-editlist.js +21 -0
- package/dist/containers/iso-base-media/mdat/mdat.js +8 -5
- package/dist/containers/iso-base-media/parse-boxes.js +1 -0
- package/dist/containers/iso-base-media/process-box.d.ts +2 -0
- package/dist/containers/iso-base-media/process-box.js +31 -4
- package/dist/containers/iso-base-media/traversal.d.ts +2 -0
- package/dist/containers/iso-base-media/traversal.js +10 -1
- package/dist/containers/mp3/parse-mpeg-header.js +1 -0
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +14 -0
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +55 -0
- package/dist/containers/riff/get-seeking-byte.d.ts +3 -1
- package/dist/containers/riff/get-seeking-byte.js +5 -1
- package/dist/containers/riff/get-strh-for-index.d.ts +2 -0
- package/dist/containers/riff/get-strh-for-index.js +17 -0
- package/dist/containers/riff/get-tracks-from-avi.js +2 -0
- package/dist/containers/riff/parse-movi.js +51 -44
- package/dist/containers/riff/parse-riff-body.js +8 -0
- package/dist/containers/transport-stream/handle-aac-packet.js +1 -0
- package/dist/containers/transport-stream/handle-avc-packet.d.ts +3 -1
- package/dist/containers/transport-stream/handle-avc-packet.js +4 -3
- package/dist/containers/transport-stream/parse-transport-stream.js +1 -0
- package/dist/containers/transport-stream/process-audio.d.ts +3 -1
- package/dist/containers/transport-stream/process-audio.js +2 -1
- package/dist/containers/transport-stream/process-sample-if-possible.js +2 -0
- package/dist/containers/transport-stream/process-stream-buffers.d.ts +5 -2
- package/dist/containers/transport-stream/process-stream-buffers.js +4 -2
- package/dist/containers/transport-stream/process-video.d.ts +3 -1
- package/dist/containers/transport-stream/process-video.js +2 -1
- package/dist/containers/wav/parse-fmt.js +1 -0
- package/dist/containers/webm/get-sample-from-block.d.ts +3 -1
- package/dist/containers/webm/get-sample-from-block.js +4 -3
- package/dist/containers/webm/make-track.js +2 -0
- package/dist/containers/webm/parse-ebml.d.ts +1 -1
- package/dist/containers/webm/parse-ebml.js +3 -1
- package/dist/containers/webm/state-for-processing.d.ts +3 -1
- package/dist/containers/webm/state-for-processing.js +2 -1
- package/dist/convert-audio-or-video-sample.js +1 -0
- package/dist/esm/index.mjs +597 -127
- package/dist/esm/universal.mjs +9 -8
- package/dist/esm/web.mjs +9 -8
- package/dist/esm/worker-server-entry.mjs +596 -126
- package/dist/esm/worker-web-entry.mjs +596 -126
- package/dist/get-seeking-byte.d.ts +3 -1
- package/dist/get-seeking-byte.js +2 -1
- package/dist/get-tracks.d.ts +3 -0
- package/dist/get-tracks.js +10 -1
- package/dist/index.d.ts +34 -3
- package/dist/readers/from-fetch.js +2 -1
- package/dist/run-parse-iteration.js +0 -3
- package/dist/state/avc/avc-state.d.ts +12 -0
- package/dist/state/avc/avc-state.js +44 -0
- package/dist/state/iso-base-media/iso-state.d.ts +4 -0
- package/dist/state/iso-base-media/iso-state.js +2 -0
- package/dist/state/iso-base-media/timescale-state.d.ts +5 -0
- package/dist/state/iso-base-media/timescale-state.js +13 -0
- package/dist/state/parser-state.d.ts +34 -3
- package/dist/state/parser-state.js +3 -0
- package/dist/state/riff/queued-b-frames.d.ts +9 -0
- package/dist/state/riff/queued-b-frames.js +47 -0
- package/dist/state/riff/queued-frames.d.ts +9 -0
- package/dist/state/riff/queued-frames.js +39 -0
- package/dist/state/riff/riff-keyframes.js +1 -0
- package/dist/state/riff/sample-counter.d.ts +13 -2
- package/dist/state/riff/sample-counter.js +34 -7
- package/dist/state/riff.d.ts +19 -2
- package/dist/state/riff.js +3 -0
- package/dist/state/transport-stream/last-emitted-sample.d.ts +1 -1
- package/dist/state/transport-stream/transport-stream.d.ts +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/webcodec-sample-types.d.ts +11 -0
- package/dist/work-on-seek-request.d.ts +2 -0
- package/dist/work-on-seek-request.js +5 -2
- package/package.json +3 -3

package/dist/containers/iso-base-media/traversal.js

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getTrakBoxByTrackId = exports.getTfraBoxes = exports.getTfraBoxesFromMfraBoxChildren = exports.getTrunBoxes = exports.getTfhdBox = exports.getTfdtBox = exports.getStssBox = exports.getStscBox = exports.getStszBox = exports.getCttsBox = exports.getSttsBox = exports.getStcoBox = exports.getVideoDescriptors = exports.getStsdBox = exports.getStblBox = exports.getMdhdBox = exports.getMdiaBox = exports.getTkhdBox = exports.getTraks = exports.getMvhdBox = exports.getMoofBoxes = exports.getMoovBoxFromState = exports.getMoovFromFromIsoStructure = exports.getFtypBox = void 0;
+exports.getElstBox = exports.getTrakBoxByTrackId = exports.getTfraBoxes = exports.getTfraBoxesFromMfraBoxChildren = exports.getTrunBoxes = exports.getTfhdBox = exports.getTfdtBox = exports.getStssBox = exports.getStscBox = exports.getStszBox = exports.getCttsBox = exports.getSttsBox = exports.getStcoBox = exports.getVideoDescriptors = exports.getStsdBox = exports.getStblBox = exports.getMdhdBox = exports.getMdiaBox = exports.getTkhdBox = exports.getTraks = exports.getMvhdBox = exports.getMoofBoxes = exports.getMoovBoxFromState = exports.getMoovFromFromIsoStructure = exports.getFtypBox = void 0;
 const precomputed_moof_1 = require("../../state/iso-base-media/precomputed-moof");
 const getFtypBox = (segments) => {
     const ftypBox = segments.find((s) => s.type === 'ftyp-box');

@@ -223,3 +223,12 @@ const getTrakBoxByTrackId = (moovBox, trackId) => {
     })) !== null && _a !== void 0 ? _a : null);
 };
 exports.getTrakBoxByTrackId = getTrakBoxByTrackId;
+const getElstBox = (trakBox) => {
+    const edtsBox = trakBox.children.find((s) => s.type === 'regular-box' && s.boxType === 'edts');
+    if (!edtsBox || edtsBox.type !== 'regular-box') {
+        return null;
+    }
+    const elstBox = edtsBox.children.find((s) => s.type === 'elst-box');
+    return elstBox;
+};
+exports.getElstBox = getElstBox;
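
For orientation, the new getElstBox helper simply walks trak → edts → elst and returns null when the track has no edit list (the new elst.js and mdat/get-editlist.js files in the file list presumably consume it). Below is a typed TypeScript sketch of the same traversal, using simplified stand-in box types rather than the parser's real internal ones:

```ts
// Simplified stand-in types; the parser's real box types are richer (assumption).
type RegularBox = {type: 'regular-box'; boxType: string; children: IsoBox[]};
type ElstBox = {type: 'elst-box'; entries: unknown[]};
type IsoBox = RegularBox | ElstBox | {type: 'other-box'};

// Same traversal as the new getElstBox export: trak -> edts -> elst.
const findElstBox = (trakChildren: IsoBox[]): ElstBox | null => {
	const edts = trakChildren.find(
		(box): box is RegularBox =>
			box.type === 'regular-box' && box.boxType === 'edts',
	);
	if (!edts) {
		return null; // the track has no edit list
	}
	const elst = edts.children.find(
		(box): box is ElstBox => box.type === 'elst-box',
	);
	return elst ?? null;
};
```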

package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts

@@ -0,0 +1,14 @@
+import type { ParserState } from '../../state/parser-state';
+import type { QueuedVideoSample } from '../../state/riff/queued-frames';
+export declare const convertQueuedSampleToMediaParserSample: (sample: QueuedVideoSample, state: ParserState) => {
+    timestamp: number;
+    cts: number;
+    dts: number;
+    type: "key" | "delta";
+    data: Uint8Array;
+    duration: number | undefined;
+    trackId: number;
+    offset: number;
+    timescale: number;
+    avc?: import("../../webcodec-sample-types").MediaParserAvcExtraInfo | undefined;
+};

package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js

@@ -0,0 +1,55 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.convertQueuedSampleToMediaParserSample = void 0;
+const convert_audio_or_video_sample_1 = require("../../convert-audio-or-video-sample");
+const get_strh_for_index_1 = require("./get-strh-for-index");
+const getKeyFrameOffsetAndPocs = ({ state, sample, }) => {
+    var _a, _b;
+    if (sample.type === 'key') {
+        const sampleOffset = state.riff.sampleCounter.getSampleCountForTrack({
+            trackId: sample.trackId,
+        });
+        return {
+            sampleOffsetAtKeyframe: sampleOffset,
+            pocsAtKeyframeOffset: [(_b = (_a = sample.avc) === null || _a === void 0 ? void 0 : _a.poc) !== null && _b !== void 0 ? _b : 0],
+        };
+    }
+    const riffKeyframes = state.riff.sampleCounter.riffKeys.getKeyframes();
+    const keyframeAtOffset = riffKeyframes.findLast((k) => k.positionInBytes <= sample.offset);
+    if (!keyframeAtOffset) {
+        throw new Error('no keyframe at offset');
+    }
+    const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[sample.trackId];
+    const pocsAtKeyframeOffset = state.riff.sampleCounter.getPocAtKeyframeOffset({
+        keyframeOffset: keyframeAtOffset.positionInBytes,
+    });
+    return {
+        sampleOffsetAtKeyframe,
+        pocsAtKeyframeOffset,
+    };
+};
+const convertQueuedSampleToMediaParserSample = (sample, state) => {
+    const strh = (0, get_strh_for_index_1.getStrhForIndex)(state.structure.getRiffStructure(), sample.trackId);
+    const samplesPerSecond = strh.rate / strh.scale;
+    const { sampleOffsetAtKeyframe, pocsAtKeyframeOffset } = getKeyFrameOffsetAndPocs({
+        sample,
+        state,
+    });
+    const indexOfPoc = pocsAtKeyframeOffset.findIndex((poc) => { var _a; return poc === ((_a = sample.avc) === null || _a === void 0 ? void 0 : _a.poc); });
+    if (indexOfPoc === -1) {
+        throw new Error('poc not found');
+    }
+    const nthSample = indexOfPoc + sampleOffsetAtKeyframe;
+    const timestamp = nthSample / samplesPerSecond;
+    const videoSample = (0, convert_audio_or_video_sample_1.convertAudioOrVideoSampleToWebCodecsTimestamps)({
+        sample: {
+            ...sample,
+            timestamp,
+            cts: timestamp,
+            dts: timestamp,
+        },
+        timescale: 1,
+    });
+    return videoSample;
+};
+exports.convertQueuedSampleToMediaParserSample = convertQueuedSampleToMediaParserSample;
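
The converter above derives a presentation timestamp for a queued frame from the sample count at the last keyframe plus the index of the frame's picture order count (POC) in the POC list recorded at that keyframe. A small worked example of the same arithmetic with made-up numbers (how the POC list is ordered is handled by the sample-counter state, which is not shown in this excerpt):

```ts
// Made-up state: the current keyframe was sample #120 of the track, and the
// POCs recorded at that keyframe offset are [0, 2, 4].
const sampleOffsetAtKeyframe = 120;
const pocsAtKeyframeOffset = [0, 2, 4];
const samplesPerSecond = 25; // strh.rate / strh.scale

// Same arithmetic as convertQueuedSampleToMediaParserSample:
const pocOfQueuedFrame = 4;
const indexOfPoc = pocsAtKeyframeOffset.indexOf(pocOfQueuedFrame); // 2
const nthSample = indexOfPoc + sampleOffsetAtKeyframe; // 122
const timestamp = nthSample / samplesPerSecond; // 4.88 seconds
```

The resulting timestamp is then passed through convertAudioOrVideoSampleToWebCodecsTimestamps with a timescale of 1, as in the hunk above.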

package/dist/containers/riff/get-seeking-byte.d.ts

@@ -1,8 +1,10 @@
+import type { AvcState } from '../../state/avc/avc-state';
 import type { RiffState } from '../../state/riff';
 import type { SeekResolution } from '../../work-on-seek-request';
 import type { RiffSeekingHints } from './seeking-hints';
-export declare const getSeekingByteForRiff: ({ info, time, riffState, }: {
+export declare const getSeekingByteForRiff: ({ info, time, riffState, avcState, }: {
     info: RiffSeekingHints;
     time: number;
     riffState: RiffState;
+    avcState: AvcState;
 }) => Promise<SeekResolution>;

package/dist/containers/riff/get-seeking-byte.js

@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getSeekingByteForRiff = void 0;
 const find_last_keyframe_1 = require("../../find-last-keyframe");
-const getSeekingByteForRiff = async ({ info, time, riffState, }) => {
+const getSeekingByteForRiff = async ({ info, time, riffState, avcState, }) => {
     const idx1Entries = await (info.hasIndex
         ? riffState.lazyIdx1.waitForLoaded()
         : Promise.resolve(null));

@@ -17,6 +17,8 @@ const getSeekingByteForRiff = async ({ info, time, riffState, }) => {
         };
     }
     riffState.sampleCounter.setSamplesFromSeek(lastKeyframe.sampleCounts);
+    riffState.queuedBFrames.clear();
+    avcState.clear();
     return {
         type: 'do-seek',
         byte: lastKeyframe.positionInBytes,

@@ -48,6 +50,8 @@ const getSeekingByteForRiff = async ({ info, time, riffState, }) => {
         throw new Error('moviOffset is null');
     }
     riffState.sampleCounter.setSamplesFromSeek(bestEntry.sampleCounts);
+    riffState.queuedBFrames.clear();
+    avcState.clear();
     return {
         type: 'do-seek',
         byte: bestEntry.offset + info.moviOffset - 4,

package/dist/containers/riff/get-strh-for-index.js

@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getStrhForIndex = void 0;
+const traversal_1 = require("./traversal");
+const getStrhForIndex = (structure, trackId) => {
+    const boxes = (0, traversal_1.getStrlBoxes)(structure);
+    const box = boxes[trackId];
+    if (!box) {
+        throw new Error('Expected box');
+    }
+    const strh = (0, traversal_1.getStrhBox)(box.children);
+    if (!strh) {
+        throw new Error('strh');
+    }
+    return strh;
+};
+exports.getStrhForIndex = getStrhForIndex;
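
getStrhForIndex is the old inline helper from parse-movi.js extracted into its own module; both parse-movi.js and the new sample converter use the returned strh header to derive a track's sample rate as rate / scale. A short sketch of that derivation with a hypothetical strh value:

```ts
// Hypothetical strh values for a 30000/1001 fps AVI video stream (illustrative only).
const strh = {rate: 30000, scale: 1001};

// Same derivation as parse-movi.js and convert-queued-sample-to-mediaparser-sample.js:
const samplesPerSecond = strh.rate / strh.scale; // ≈ 29.97 frames per second

// The n-th sample of the track then has the timestamp n / samplesPerSecond seconds.
const timestampOfSample = (n: number) => n / samplesPerSecond;
```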

package/dist/containers/riff/get-tracks-from-avi.js

@@ -28,6 +28,7 @@ const makeAviAudioTrack = ({ strf, index, }) => {
         sampleRate: strf.sampleRate,
         timescale: timescale_1.MEDIA_PARSER_RIFF_TIMESCALE,
         trackId: index,
+        startInSeconds: 0,
     };
 };
 exports.makeAviAudioTrack = makeAviAudioTrack;

@@ -68,6 +69,7 @@ const makeAviVideoTrack = ({ strh, strf, index, }) => {
             denominator: 1,
         },
         fps: strh.rate / strh.scale,
+        startInSeconds: 0,
     };
 };
 exports.makeAviVideoTrack = makeAviVideoTrack;

package/dist/containers/riff/parse-movi.js

@@ -4,66 +4,74 @@ exports.parseMovi = exports.handleChunk = void 0;
 const convert_audio_or_video_sample_1 = require("../../convert-audio-or-video-sample");
 const key_1 = require("../avc/key");
 const parse_avc_1 = require("../avc/parse-avc");
-const
-const
-    const boxes = (0, traversal_1.getStrlBoxes)(structure);
-    const box = boxes[trackId];
-    if (!box) {
-        throw new Error('Expected box');
-    }
-    const strh = (0, traversal_1.getStrhBox)(box.children);
-    if (!strh) {
-        throw new Error('strh');
-    }
-    return strh;
-};
+const convert_queued_sample_to_mediaparser_sample_1 = require("./convert-queued-sample-to-mediaparser-sample");
+const get_strh_for_index_1 = require("./get-strh-for-index");
 const handleChunk = async ({ state, ckId, ckSize, }) => {
+    var _a;
     const { iterator } = state;
     const offset = iterator.counter.getOffset() - 8;
     const videoChunk = ckId.match(/^([0-9]{2})dc$/);
     if (videoChunk) {
         const trackId = parseInt(videoChunk[1], 10);
-        const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
+        const strh = (0, get_strh_for_index_1.getStrhForIndex)(state.structure.getRiffStructure(), trackId);
         const samplesPerSecond = strh.rate / strh.scale;
-        const nthSample = state.riff.sampleCounter.getSamplesForTrack(trackId);
-        const timeInSec = nthSample / samplesPerSecond;
-        const timestamp = timeInSec;
         const data = iterator.getSlice(ckSize);
-        const infos = (0, parse_avc_1.parseAvc)(data);
+        const infos = (0, parse_avc_1.parseAvc)(data, state.avc);
         const keyOrDelta = (0, key_1.getKeyFrameOrDeltaFromAvcInfo)(infos);
+        const info = infos.find((i) => i.type === 'keyframe' || i.type === 'delta-frame');
         const avcProfile = infos.find((i) => i.type === 'avc-profile');
         const ppsProfile = infos.find((i) => i.type === 'avc-pps');
         if (avcProfile && ppsProfile && !state.riff.getAvcProfile()) {
             await state.riff.onProfile({ pps: ppsProfile, sps: avcProfile });
             state.callbacks.tracks.setIsDone(state.logLevel);
         }
-        const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        const rawSample = {
+            data,
+            // We must also NOT pass a duration because if the the next sample is 0,
+            // this sample would be longer. Chrome will pad it with silence.
+            // If we'd pass a duration instead, it would shift the audio and we think that audio is not finished
+            duration: 1 / samplesPerSecond,
+            trackId,
+            type: keyOrDelta === 'bidirectional' ? 'delta' : keyOrDelta,
+            offset,
+            timescale: samplesPerSecond,
+            avc: info,
+        };
+        const maxFramesInBuffer = state.avc.getMaxFramesInBuffer();
+        if (maxFramesInBuffer === null) {
+            throw new Error('maxFramesInBuffer is null');
+        }
+        if (((_a = info === null || info === void 0 ? void 0 : info.poc) !== null && _a !== void 0 ? _a : null) === null) {
+            throw new Error('poc is null');
+        }
+        const keyframeOffset = state.riff.sampleCounter.getKeyframeAtOffset(rawSample);
+        if (keyframeOffset !== null) {
+            state.riff.sampleCounter.setPocAtKeyframeOffset({
+                keyframeOffset,
+                poc: info.poc,
+            });
+        }
+        state.riff.queuedBFrames.addFrame(rawSample, maxFramesInBuffer);
+        const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
+        if (!releasedFrame) {
+            return;
+        }
+        const videoSample = (0, convert_queued_sample_to_mediaparser_sample_1.convertQueuedSampleToMediaParserSample)(releasedFrame, state);
+        state.riff.sampleCounter.onVideoSample(videoSample);
         await state.callbacks.onVideoSample(trackId, videoSample);
-        return;
     }
     const audioChunk = ckId.match(/^([0-9]{2})wb$/);
     if (audioChunk) {
         const trackId = parseInt(audioChunk[1], 10);
-        const strh = getStrhForIndex(state.structure.getRiffStructure(), trackId);
-        const
-
+        const strh = (0, get_strh_for_index_1.getStrhForIndex)(state.structure.getRiffStructure(), trackId);
+        const { strf } = strh;
+        if (strf.type !== 'strf-box-audio') {
+            throw new Error('audio');
+        }
+        const samplesPerSecond = (strh.rate / strh.scale) * strf.numberOfChannels;
+        const nthSample = state.riff.sampleCounter.getSampleCountForTrack({
+            trackId,
+        });
         const timeInSec = nthSample / samplesPerSecond;
         const timestamp = timeInSec;
         const data = iterator.getSlice(ckSize);

@@ -71,7 +79,9 @@ const handleChunk = async ({ state, ckId, ckSize, }) => {
             sample: {
                 cts: timestamp,
                 dts: timestamp,
-                data,
+                data, // We must also NOT pass a duration because if the the next sample is 0,
+                // this sample would be longer. Chrome will pad it with silence.
+                // If we'd pass a duration instead, it would shift the audio and we think that audio is not finished
                 duration: undefined,
                 timestamp,
                 trackId,

@@ -84,9 +94,6 @@ const handleChunk = async ({ state, ckId, ckSize, }) => {
         state.riff.sampleCounter.onAudioSample(trackId, audioSample);
         // In example.avi, we have samples with 0 data
         // Chrome fails on these
-        // We must also NOT pass a duration because if the the next sample is 0,
-        // this sample would be longer. Chrome will pad it with silence.
-        // If we'd pass a duration instead, it would shift the audio and we think that audio is not finished
         await state.callbacks.onAudioSample(trackId, audioSample);
     }
 };
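
With this change, parse-movi.js no longer emits AVC video samples immediately: each raw sample is pushed into state.riff.queuedBFrames (bounded by getMaxFramesInBuffer() from the new AVC state), and only frames released from that queue are converted and emitted. The queue itself lives in state/riff/queued-b-frames.js, which is not shown in this excerpt; the sketch below is purely illustrative of how such a bounded reorder buffer could behave, not the package's actual implementation:

```ts
type QueuedFrame = {poc: number; data: Uint8Array};

// Illustrative only: holds up to `maxFramesInBuffer` frames and, on overflow,
// releases the frame with the smallest POC, so that B-frames arriving in
// decode order can be handed out in presentation order.
class BoundedReorderQueue {
	private frames: QueuedFrame[] = [];
	private released: QueuedFrame[] = [];

	addFrame(frame: QueuedFrame, maxFramesInBuffer: number) {
		this.frames.push(frame);
		while (this.frames.length > maxFramesInBuffer) {
			this.frames.sort((a, b) => a.poc - b.poc);
			this.released.push(this.frames.shift() as QueuedFrame);
		}
	}

	getReleasedFrame(): QueuedFrame | null {
		return this.released.shift() ?? null;
	}

	clear() {
		this.frames = [];
		this.released = [];
	}
}
```

In the real flow above, a released frame is passed through convertQueuedSampleToMediaParserSample before state.callbacks.onVideoSample is called, and the queue is cleared on seeks (see the get-seeking-byte.js hunks).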

package/dist/containers/riff/parse-riff-body.js

@@ -4,9 +4,17 @@ exports.parseRiffBody = void 0;
 const skip_1 = require("../../skip");
 const may_skip_video_data_1 = require("../../state/may-skip-video-data");
 const video_section_1 = require("../../state/video-section");
+const convert_queued_sample_to_mediaparser_sample_1 = require("./convert-queued-sample-to-mediaparser-sample");
 const expect_riff_box_1 = require("./expect-riff-box");
 const parse_video_section_1 = require("./parse-video-section");
 const parseRiffBody = async (state) => {
+    const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
+    if (releasedFrame) {
+        const converted = (0, convert_queued_sample_to_mediaparser_sample_1.convertQueuedSampleToMediaParserSample)(releasedFrame, state);
+        state.riff.sampleCounter.onVideoSample(converted);
+        await state.callbacks.onVideoSample(releasedFrame.trackId, converted);
+        return null;
+    }
     if (state.mediaSection.isCurrentByteInMediaSection(state.iterator) ===
         'in-section') {
         if ((0, may_skip_video_data_1.maySkipVideoData)({

package/dist/containers/transport-stream/handle-avc-packet.d.ts

@@ -1,10 +1,11 @@
 import type { MediaParserLogLevel } from '../../log';
+import type { AvcState } from '../../state/avc/avc-state';
 import type { CallbacksState } from '../../state/sample-callbacks';
 import type { TransportStreamState } from '../../state/transport-stream/transport-stream';
 import type { MediaParserOnVideoTrack } from '../../webcodec-sample-types';
 import type { TransportStreamPacketBuffer } from './process-stream-buffers';
 export declare const MPEG_TIMESCALE = 90000;
-export declare const handleAvcPacket: ({ streamBuffer, programId, offset, sampleCallbacks, logLevel, onVideoTrack, transportStream, makeSamplesStartAtZero, }: {
+export declare const handleAvcPacket: ({ streamBuffer, programId, offset, sampleCallbacks, logLevel, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }: {
     streamBuffer: TransportStreamPacketBuffer;
     programId: number;
     offset: number;

@@ -13,4 +14,5 @@ export declare const handleAvcPacket: ({ streamBuffer, programId, offset, sample
     onVideoTrack: MediaParserOnVideoTrack | null;
     transportStream: TransportStreamState;
     makeSamplesStartAtZero: boolean;
+    avcState: AvcState;
 }) => Promise<void>;

package/dist/containers/transport-stream/handle-avc-packet.js

@@ -11,9 +11,9 @@ const parse_avc_1 = require("../avc/parse-avc");
 const sps_and_pps_1 = require("../avc/sps-and-pps");
 const color_to_webcodecs_colors_1 = require("../iso-base-media/color-to-webcodecs-colors");
 exports.MPEG_TIMESCALE = 90000;
-const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallbacks, logLevel, onVideoTrack, transportStream, makeSamplesStartAtZero, }) => {
+const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallbacks, logLevel, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }) => {
     var _a, _b;
-    const avc = (0, parse_avc_1.parseAvc)(streamBuffer.getBuffer());
+    const avc = (0, parse_avc_1.parseAvc)(streamBuffer.getBuffer(), avcState);
     const isTrackRegistered = sampleCallbacks.tracks.getTracks().find((t) => {
         return t.trackId === programId;
     });

@@ -56,6 +56,7 @@ const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallback
             },
             colorSpace: (0, color_to_webcodecs_colors_1.mediaParserAdvancedColorToWebCodecsColor)(advancedColor),
             advancedColor,
+            startInSeconds: 0,
         };
         await (0, register_track_1.registerVideoTrack)({
             track,

@@ -78,7 +79,7 @@ const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallback
         duration: undefined,
         data: streamBuffer.getBuffer(),
         trackId: programId,
-        type,
+        type: type === 'bidirectional' ? 'delta' : type,
         offset,
         timescale: exports.MPEG_TIMESCALE,
     };

package/dist/containers/transport-stream/process-audio.d.ts

@@ -1,5 +1,6 @@
 import type { MediaParserLogLevel } from '../../log';
 import type { TransportStreamStructure } from '../../parse-result';
+import type { AvcState } from '../../state/avc/avc-state';
 import type { CallbacksState } from '../../state/sample-callbacks';
 import type { TransportStreamState } from '../../state/transport-stream/transport-stream';
 import type { MediaParserOnAudioTrack, MediaParserOnVideoTrack } from '../../webcodec-sample-types';

@@ -8,7 +9,7 @@ import type { TransportStreamPacketBuffer } from './process-stream-buffers';
 export declare const canProcessAudio: ({ streamBuffer, }: {
     streamBuffer: TransportStreamPacketBuffer;
 }) => boolean;
-export declare const processAudio: ({ transportStreamEntry, structure, offset, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }: {
+export declare const processAudio: ({ transportStreamEntry, structure, offset, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }: {
     transportStreamEntry: TransportStreamEntry;
     structure: TransportStreamStructure;
     sampleCallbacks: CallbacksState;

@@ -18,4 +19,5 @@ export declare const processAudio: ({ transportStreamEntry, structure, offset, s
     transportStream: TransportStreamState;
     offset: number;
     makeSamplesStartAtZero: boolean;
+    avcState: AvcState;
 }) => Promise<void>;

package/dist/containers/transport-stream/process-audio.js

@@ -15,7 +15,7 @@ const canProcessAudio = ({ streamBuffer, }) => {
     return true;
 };
 exports.canProcessAudio = canProcessAudio;
-const processAudio = async ({ transportStreamEntry, structure, offset, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }) => {
+const processAudio = async ({ transportStreamEntry, structure, offset, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }) => {
     var _a, _b;
     const { streamBuffers, nextPesHeaderStore: nextPesHeader } = transportStream;
     const streamBuffer = streamBuffers.get(transportStreamEntry.pid);

@@ -43,6 +43,7 @@ const processAudio = async ({ transportStreamEntry, structure, offset, sampleCal
         onVideoTrack,
         transportStream,
         makeSamplesStartAtZero,
+        avcState,
     });
     const rest = streamBuffer.getBuffer().slice(expectedLength);
     streamBuffers.set(transportStreamEntry.pid, (0, process_stream_buffers_1.makeTransportStreamPacketBuffer)({

package/dist/containers/transport-stream/process-sample-if-possible.js

@@ -28,6 +28,7 @@ const processSampleIfPossible = async (state) => {
             onVideoTrack: state.onVideoTrack,
             transportStream: state.transportStream,
             makeSamplesStartAtZero: state.makeSamplesStartAtZero,
+            avcState: state.avc,
         });
         state.transportStream.streamBuffers.delete(stream.pid);
         state.transportStream.streamBuffers.set(stream.pid, (0, process_stream_buffers_1.makeTransportStreamPacketBuffer)({

@@ -51,6 +52,7 @@ const processSampleIfPossible = async (state) => {
             transportStream: state.transportStream,
             makeSamplesStartAtZero: state.makeSamplesStartAtZero,
             transportStreamEntry: stream,
+            avcState: state.avc,
         });
         processed = true;
         break;

package/dist/containers/transport-stream/process-stream-buffers.d.ts

@@ -1,5 +1,6 @@
 import type { MediaParserLogLevel } from '../../log';
 import type { TransportStreamStructure } from '../../parse-result';
+import type { AvcState } from '../../state/avc/avc-state';
 import type { CallbacksState } from '../../state/sample-callbacks';
 import type { TransportStreamState } from '../../state/transport-stream/transport-stream';
 import type { MediaParserOnAudioTrack, MediaParserOnVideoTrack } from '../../webcodec-sample-types';

@@ -17,7 +18,7 @@ export declare const makeTransportStreamPacketBuffer: ({ buffers, pesHeader, off
     offset: number;
 }) => TransportStreamPacketBuffer;
 export type StreamBufferMap = Map<number, TransportStreamPacketBuffer>;
-export declare const processStreamBuffer: ({ streamBuffer, programId, structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }: {
+export declare const processStreamBuffer: ({ streamBuffer, programId, structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }: {
     streamBuffer: TransportStreamPacketBuffer;
     programId: number;
     structure: TransportStreamStructure;

@@ -27,8 +28,9 @@ export declare const processStreamBuffer: ({ streamBuffer, programId, structure,
     onVideoTrack: MediaParserOnVideoTrack | null;
     transportStream: TransportStreamState;
     makeSamplesStartAtZero: boolean;
+    avcState: AvcState;
 }) => Promise<void>;
-export declare const processFinalStreamBuffers: ({ structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }: {
+export declare const processFinalStreamBuffers: ({ structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }: {
     structure: TransportStreamStructure;
     sampleCallbacks: CallbacksState;
     logLevel: MediaParserLogLevel;

@@ -36,4 +38,5 @@ export declare const processFinalStreamBuffers: ({ structure, sampleCallbacks, l
     onVideoTrack: MediaParserOnVideoTrack | null;
     transportStream: TransportStreamState;
     makeSamplesStartAtZero: boolean;
+    avcState: AvcState;
 }) => Promise<void>;

package/dist/containers/transport-stream/process-stream-buffers.js

@@ -53,7 +53,7 @@ const makeTransportStreamPacketBuffer = ({ buffers, pesHeader, offset, }) => {
     };
 };
 exports.makeTransportStreamPacketBuffer = makeTransportStreamPacketBuffer;
-const processStreamBuffer = async ({ streamBuffer, programId, structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }) => {
+const processStreamBuffer = async ({ streamBuffer, programId, structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }) => {
     const stream = (0, traversal_1.getStreamForId)(structure, programId);
     if (!stream) {
         throw new Error('No stream found');

@@ -73,6 +73,7 @@ const processStreamBuffer = async ({ streamBuffer, programId, structure, sampleC
             offset: streamBuffer.offset,
             transportStream,
             makeSamplesStartAtZero,
+            avcState,
         });
     }
     // 15 = AAC / ADTS

@@ -97,7 +98,7 @@ const processStreamBuffer = async ({ streamBuffer, programId, structure, sampleC
     }
 };
 exports.processStreamBuffer = processStreamBuffer;
-const processFinalStreamBuffers = async ({ structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }) => {
+const processFinalStreamBuffers = async ({ structure, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }) => {
     for (const [programId, buffer] of transportStream.streamBuffers) {
         if (buffer.getBuffer().byteLength > 0) {
             await (0, exports.processStreamBuffer)({

@@ -110,6 +111,7 @@ const processFinalStreamBuffers = async ({ structure, sampleCallbacks, logLevel,
                 onVideoTrack,
                 transportStream,
                 makeSamplesStartAtZero,
+                avcState,
             });
             transportStream.streamBuffers.delete(programId);
         }

package/dist/containers/transport-stream/process-video.d.ts

@@ -1,5 +1,6 @@
 import type { MediaParserLogLevel } from '../../log';
 import type { TransportStreamStructure } from '../../parse-result';
+import type { AvcState } from '../../state/avc/avc-state';
 import type { CallbacksState } from '../../state/sample-callbacks';
 import type { TransportStreamState } from '../../state/transport-stream/transport-stream';
 import type { MediaParserOnAudioTrack, MediaParserOnVideoTrack } from '../../webcodec-sample-types';

@@ -7,7 +8,7 @@ import type { TransportStreamPacketBuffer } from './process-stream-buffers';
 export declare const canProcessVideo: ({ streamBuffer, }: {
     streamBuffer: TransportStreamPacketBuffer;
 }) => boolean;
-export declare const processVideo: ({ programId, structure, streamBuffer, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }: {
+export declare const processVideo: ({ programId, structure, streamBuffer, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }: {
     programId: number;
     structure: TransportStreamStructure;
     streamBuffer: TransportStreamPacketBuffer;

@@ -17,4 +18,5 @@ export declare const processVideo: ({ programId, structure, streamBuffer, sample
     onVideoTrack: MediaParserOnVideoTrack | null;
     transportStream: TransportStreamState;
     makeSamplesStartAtZero: boolean;
+    avcState: AvcState;
 }) => Promise<Uint8Array>;

package/dist/containers/transport-stream/process-video.js

@@ -10,7 +10,7 @@ const canProcessVideo = ({ streamBuffer, }) => {
     return true;
 };
 exports.canProcessVideo = canProcessVideo;
-const processVideo = async ({ programId, structure, streamBuffer, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, }) => {
+const processVideo = async ({ programId, structure, streamBuffer, sampleCallbacks, logLevel, onAudioTrack, onVideoTrack, transportStream, makeSamplesStartAtZero, avcState, }) => {
     const indexOfSeparator = streamBuffer.get2ndSubArrayIndex();
     if (indexOfSeparator === -1 || indexOfSeparator === 0) {
         throw new Error('cannot process avc stream');

@@ -33,6 +33,7 @@ const processVideo = async ({ programId, structure, streamBuffer, sampleCallback
         onVideoTrack,
         transportStream,
         makeSamplesStartAtZero,
+        avcState,
     });
     return rest;
 };

package/dist/containers/webm/get-sample-from-block.d.ts

@@ -1,4 +1,5 @@
 import type { MediaParserLogLevel } from '../../log';
+import type { AvcState } from '../../state/avc/avc-state';
 import type { WebmState } from '../../state/matroska/webm';
 import type { CallbacksState } from '../../state/sample-callbacks';
 import type { StructureState } from '../../state/structure';

@@ -16,7 +17,7 @@ type SampleResult = {
 } | {
     type: 'no-sample';
 };
-export declare const getSampleFromBlock: ({ ebml, webmState, offset, structureState, callbacks, logLevel, onVideoTrack, }: {
+export declare const getSampleFromBlock: ({ ebml, webmState, offset, structureState, callbacks, logLevel, onVideoTrack, avcState, }: {
     ebml: BlockSegment | SimpleBlockSegment;
     webmState: WebmState;
     offset: number;

@@ -24,5 +25,6 @@ export declare const getSampleFromBlock: ({ ebml, webmState, offset, structureSt
     callbacks: CallbacksState;
     logLevel: MediaParserLogLevel;
     onVideoTrack: MediaParserOnVideoTrack | null;
+    avcState: AvcState;
 }) => Promise<SampleResult>;
 export {};

package/dist/containers/webm/get-sample-from-block.js

@@ -7,7 +7,7 @@ const parse_avc_1 = require("../avc/parse-avc");
 const get_ready_tracks_1 = require("./get-ready-tracks");
 const all_segments_1 = require("./segments/all-segments");
 const block_simple_block_flags_1 = require("./segments/block-simple-block-flags");
-const addAvcToTrackAndActivateTrackIfNecessary = async ({ partialVideoSample, codec, structureState, webmState, trackNumber, logLevel, callbacks, onVideoTrack, }) => {
+const addAvcToTrackAndActivateTrackIfNecessary = async ({ partialVideoSample, codec, structureState, webmState, trackNumber, logLevel, callbacks, onVideoTrack, avcState, }) => {
     if (codec !== 'V_MPEG4/ISO/AVC') {
         return;
     }

@@ -18,7 +18,7 @@ const addAvcToTrackAndActivateTrackIfNecessary = async ({ partialVideoSample, co
     if (missingTracks.length === 0) {
         return;
     }
-    const parsed = (0, parse_avc_1.parseAvc)(partialVideoSample.data);
+    const parsed = (0, parse_avc_1.parseAvc)(partialVideoSample.data, avcState);
     for (const parse of parsed) {
         if (parse.type === 'avc-profile') {
             webmState.setAvcProfileForTrackNumber(trackNumber, parse);

@@ -45,7 +45,7 @@ const addAvcToTrackAndActivateTrackIfNecessary = async ({ partialVideoSample, co
         }
     }
 };
-const getSampleFromBlock = async ({ ebml, webmState, offset, structureState, callbacks, logLevel, onVideoTrack, }) => {
+const getSampleFromBlock = async ({ ebml, webmState, offset, structureState, callbacks, logLevel, onVideoTrack, avcState, }) => {
     const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(ebml.value, ebml.value.length);
     const trackNumber = iterator.getVint();
     if (trackNumber === null) {

@@ -99,6 +99,7 @@ const getSampleFromBlock = async ({ ebml, webmState, offset, structureState, cal
         callbacks,
         logLevel,
         onVideoTrack,
+        avcState,
     });
     const sample = {
         ...partialVideoSample,