@remotion/media-parser 4.0.303 → 4.0.305
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/aac/parse-aac.js +8 -6
- package/dist/containers/flac/parse-flac-frame.js +6 -6
- package/dist/containers/flac/parse-streaminfo.js +3 -1
- package/dist/containers/iso-base-media/find-keyframe-before-time.js +2 -2
- package/dist/containers/iso-base-media/get-keyframes.js +3 -3
- package/dist/containers/iso-base-media/get-sample-position-bounds.js +2 -2
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +3 -3
- package/dist/containers/iso-base-media/get-seeking-byte.js +1 -1
- package/dist/containers/iso-base-media/make-track.js +7 -3
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +3 -3
- package/dist/containers/iso-base-media/mdat/mdat.js +16 -16
- package/dist/containers/iso-base-media/parse-icc-profile.js +2 -2
- package/dist/containers/m3u/first-sample-in-m3u-chunk.js +2 -1
- package/dist/containers/m3u/sample-sorter.js +2 -2
- package/dist/containers/mp3/parse-mpeg-header.js +7 -2
- package/dist/containers/mp3/seek/audio-sample-from-cbr.js +4 -6
- package/dist/containers/mp3/seek/audio-sample-from-vbr.js +4 -6
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +6 -5
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +7 -7
- package/dist/containers/riff/get-tracks-from-avi.js +5 -2
- package/dist/containers/riff/parse-movi.js +35 -25
- package/dist/containers/riff/parse-riff-body.js +13 -3
- package/dist/containers/riff/seek/fetch-idx1.js +4 -1
- package/dist/containers/transport-stream/handle-aac-packet.js +8 -7
- package/dist/containers/transport-stream/handle-avc-packet.js +8 -7
- package/dist/containers/wav/parse-fmt.js +3 -1
- package/dist/containers/wav/parse-media-section.js +5 -5
- package/dist/containers/webm/get-sample-from-block.d.ts +6 -0
- package/dist/containers/webm/get-sample-from-block.js +9 -8
- package/dist/containers/webm/make-track.js +5 -2
- package/dist/containers/webm/parse-ebml.js +12 -3
- package/dist/containers/webm/seek/seeking-hints.js +1 -1
- package/dist/convert-audio-or-video-sample.js +6 -9
- package/dist/download-and-parse-media.js +3 -0
- package/dist/esm/index.mjs +266 -196
- package/dist/esm/worker-server-entry.mjs +262 -196
- package/dist/esm/worker-web-entry.mjs +262 -196
- package/dist/get-duration.js +2 -2
- package/dist/get-sample-positions-from-mp4.js +2 -2
- package/dist/get-sample-positions.d.ts +2 -2
- package/dist/get-sample-positions.js +2 -2
- package/dist/get-tracks.d.ts +6 -3
- package/dist/index.d.ts +24 -5
- package/dist/index.js +3 -1
- package/dist/internal-parse-media.js +3 -0
- package/dist/iterator/buffer-manager.js +1 -1
- package/dist/parse-media.js +3 -0
- package/dist/samples-from-moof.js +2 -2
- package/dist/state/iso-base-media/cached-sample-positions.d.ts +2 -2
- package/dist/state/parser-state.d.ts +23 -5
- package/dist/state/riff/queued-frames.d.ts +14 -3
- package/dist/state/riff/queued-frames.js +3 -3
- package/dist/state/riff/sample-counter.d.ts +4 -1
- package/dist/state/riff/sample-counter.js +8 -7
- package/dist/state/riff.d.ts +15 -3
- package/dist/state/sample-callbacks.d.ts +8 -2
- package/dist/state/sample-callbacks.js +5 -4
- package/dist/state/samples-observed/slow-duration-fps.js +7 -6
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/webcodec-sample-types.d.ts +3 -9
- package/dist/webcodecs-timescale.d.ts +1 -0
- package/dist/webcodecs-timescale.js +4 -0
- package/dist/worker-server.js +2 -1
- package/package.json +3 -3
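Across the hunks below, 4.0.305 makes one consistent set of internal API changes: emitted samples replace the `cts`/`dts` pair with a single `decodingTimestamp` (presentation time stays in `timestamp`), `trackId` and `timescale` move off the sample object, tracks gain an `originalTimescale` (the container's own timescale) next to a `timescale` pinned to `WEBCODECS_TIMESCALE`, and the internal `onAudioSample`/`onVideoSample` callbacks switch from positional arguments to a single options object. A minimal before/after sketch of that callback shape in TypeScript — the `Sample` and `Callbacks` types are illustrative stand-ins, not exports of the package:

```ts
// Sketch only: type names are assumptions; field names mirror the hunks below.
type Sample = {
  data: Uint8Array;
  timestamp: number;          // presentation timestamp
  decodingTimestamp: number;  // replaces the former cts/dts pair
  duration: number | undefined;
  offset: number;
  type: 'key' | 'delta';
};

type Callbacks = {
  // 4.0.303: onAudioSample(trackId: number, sample: Sample) => Promise<void>
  // 4.0.305: a single options object carries both the sample and the trackId
  onAudioSample: (options: {audioSample: Sample; trackId: number}) => Promise<void>;
};

// Usage as seen at the call sites in the diff:
async function emit(callbacks: Callbacks, audioSample: Sample) {
  await callbacks.onAudioSample({audioSample, trackId: 0});
}
```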
package/dist/containers/aac/parse-aac.js

@@ -4,6 +4,7 @@ exports.parseAac = void 0;
 const aac_codecprivate_1 = require("../../aac-codecprivate");
 const convert_audio_or_video_sample_1 = require("../../convert-audio-or-video-sample");
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const parseAac = async (state) => {
 const { iterator } = state;
 const startOffset = iterator.counter.getOffset();
@@ -59,10 +60,11 @@ const parseAac = async (state) => {
 description: codecPrivate,
 numberOfChannels: channelConfiguration,
 sampleRate,
-
+originalTimescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 trackId: 0,
 type: 'audio',
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 },
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
 tracks: state.callbacks.tracks,
@@ -86,15 +88,15 @@ const parseAac = async (state) => {
 type: 'key',
 data,
 offset: startOffset,
-
-trackId: 0,
-cts: timestamp,
-dts: timestamp,
+decodingTimestamp: timestamp,
 timestamp,
 },
 timescale: 1,
 });
-await state.callbacks.onAudioSample(
+await state.callbacks.onAudioSample({
+audioSample,
+trackId: 0,
+});
 return Promise.resolve(null);
 };
 exports.parseAac = parseAac;
package/dist/containers/flac/parse-flac-frame.js

@@ -64,7 +64,7 @@ const parseFrameHeader = ({ iterator, state, }) => {
 };
 exports.parseFrameHeader = parseFrameHeader;
 const emitSample = async ({ state, data, offset, }) => {
-const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(data,
+const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(data, data.length);
 const parsed = (0, exports.parseFrameHeader)({ iterator, state });
 if (!parsed) {
 throw new Error('Invalid CRC');
@@ -89,17 +89,17 @@ const emitSample = async ({ state, data, offset, }) => {
 sample: {
 data,
 duration,
-
-dts: timestamp,
+decodingTimestamp: timestamp,
 timestamp,
 type: 'key',
 offset,
-timescale: 1,
-trackId: 0,
 },
 timescale: 1,
 });
-await state.callbacks.onAudioSample(
+await state.callbacks.onAudioSample({
+audioSample,
+trackId: 0,
+});
 iterator.destroy();
 };
 const parseFlacFrame = async ({ state, iterator, }) => {
package/dist/containers/flac/parse-streaminfo.js

@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseStreamInfo = void 0;
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const parseStreamInfo = async ({ iterator, state, }) => {
 const counter = iterator.counter.getOffset();
 const minimumBlockSize = iterator.getUint16();
@@ -41,9 +42,10 @@ const parseStreamInfo = async ({ iterator, state, }) => {
 codecEnum: 'flac',
 numberOfChannels: channels,
 sampleRate,
-
+originalTimescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 trackId: 0,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 },
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
 tracks: state.callbacks.tracks,
package/dist/containers/iso-base-media/find-keyframe-before-time.js

@@ -6,8 +6,8 @@ const findKeyframeBeforeTime = ({ samplePositions, time, timescale, mediaSection
 let videoByte = 0;
 let videoSample = null;
 for (const sample of samplePositions) {
-const ctsInSeconds = sample.
-const dtsInSeconds = sample.
+const ctsInSeconds = sample.timestamp / timescale + startInSeconds;
+const dtsInSeconds = sample.decodingTimestamp / timescale + startInSeconds;
 if (!sample.isKeyframe) {
 continue;
 }
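The rewritten loop above converts the renamed sample fields into seconds before comparing them to the seek target. A hedged sketch of that arithmetic in TypeScript (local names only, nothing imported from the package):

```ts
// Assumed minimal shape of a sample position as used above.
type SamplePosition = {
  timestamp: number;          // presentation time, in track timescale units
  decodingTimestamp: number;  // decode time, in track timescale units
  isKeyframe: boolean;
};

const toSeconds = (
  sample: SamplePosition,
  timescale: number,       // the track's originalTimescale
  startInSeconds: number,  // offset of the track start
) => ({
  ctsInSeconds: sample.timestamp / timescale + startInSeconds,
  dtsInSeconds: sample.decodingTimestamp / timescale + startInSeconds,
});

// e.g. a timestamp of 90000 at a timescale of 30000 with startInSeconds = 0 is 3 seconds.
```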
package/dist/containers/iso-base-media/get-keyframes.js

@@ -21,7 +21,7 @@ const getKeyframesFromIsoBaseMedia = (state) => {
 return [];
 }
 const allSamples = videoTracks.map((t) => {
-const {
+const { originalTimescale: ts } = t;
 const trakBox = (0, traversal_1.getTrakBoxByTrackId)(moov, t.trackId);
 if (!trakBox) {
 return [];
@@ -44,8 +44,8 @@ const getKeyframesFromIsoBaseMedia = (state) => {
 .map((k) => {
 return {
 trackId: t.trackId,
-presentationTimeInSeconds: k.
-decodingTimeInSeconds: k.
+presentationTimeInSeconds: k.timestamp / ts,
+decodingTimeInSeconds: k.decodingTimestamp / ts,
 positionInBytes: k.offset,
 sizeInBytes: k.size,
 };
package/dist/containers/iso-base-media/get-sample-position-bounds.js

@@ -6,8 +6,8 @@ const getSamplePositionBounds = (samplePositions, timescale) => {
 let min = Infinity;
 let max = -Infinity;
 for (const samplePosition of samplePositions) {
-const timestampMin = Math.min(samplePosition.
-const timestampMax = Math.max(samplePosition.
+const timestampMin = Math.min(samplePosition.timestamp, samplePosition.decodingTimestamp);
+const timestampMax = Math.max(samplePosition.timestamp, samplePosition.decodingTimestamp) +
 ((_a = samplePosition.duration) !== null && _a !== void 0 ? _a : 0);
 if (timestampMin < min) {
 min = timestampMin;
package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js

@@ -44,14 +44,14 @@ const getSeekingByteFromFragmentedMp4 = async ({ info, time, logLevel, currentPo
 });
 log_1.Log.trace(logLevel, 'Fragmented MP4 - Checking if we have seeking info for this time range');
 for (const positions of samplePositionsArray) {
-const { min, max } = (0, get_sample_position_bounds_1.getSamplePositionBounds)(positions.samples, firstTrack.
+const { min, max } = (0, get_sample_position_bounds_1.getSamplePositionBounds)(positions.samples, firstTrack.originalTimescale);
 if (min <= time &&
 (positions.isLastFragment || isLastChunkInPlaylist || time <= max)) {
 log_1.Log.trace(logLevel, `Fragmented MP4 - Found that we have seeking info for this time range: ${min} <= ${time} <= ${max}`);
 const kf = (0, find_keyframe_before_time_1.findKeyframeBeforeTime)({
 samplePositions: positions.samples,
 time,
-timescale: firstTrack.
+timescale: firstTrack.originalTimescale,
 logLevel,
 mediaSections: info.mediaSections,
 startInSeconds: firstTrack.startInSeconds,
@@ -72,7 +72,7 @@ const getSeekingByteFromFragmentedMp4 = async ({ info, time, logLevel, currentPo
 mfra: atom,
 time,
 firstTrack,
-timescale: firstTrack.
+timescale: firstTrack.originalTimescale,
 });
 if (moofOffset !== null &&
 !(moofOffset.start <= currentPosition && currentPosition < moofOffset.end)) {
package/dist/containers/iso-base-media/get-seeking-byte.js

@@ -50,7 +50,7 @@ const getSeekingByteFromIsoBaseMedia = ({ info, time, logLevel, currentPosition,
 const keyframe = (0, find_keyframe_before_time_1.findKeyframeBeforeTime)({
 samplePositions,
 time,
-timescale: track.
+timescale: track.originalTimescale,
 logLevel,
 mediaSections: info.mediaSections,
 startInSeconds: track.startInSeconds,
package/dist/containers/iso-base-media/make-track.js

@@ -5,6 +5,7 @@ const get_audio_codec_1 = require("../../get-audio-codec");
 const get_fps_1 = require("../../get-fps");
 const get_sample_aspect_ratio_1 = require("../../get-sample-aspect-ratio");
 const get_video_codec_1 = require("../../get-video-codec");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const color_to_webcodecs_colors_1 = require("./color-to-webcodecs-colors");
 const get_actual_number_of_channels_1 = require("./get-actual-number-of-channels");
 const get_video_codec_from_iso_track_1 = require("./get-video-codec-from-iso-track");
@@ -41,7 +42,7 @@ const makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
 return {
 type: 'audio',
 trackId: tkhdBox.trackId,
-
+originalTimescale: timescaleAndDuration.timescale,
 codec: codecString,
 numberOfChannels: actual.numberOfChannels,
 sampleRate: actual.sampleRate,
@@ -49,15 +50,17 @@ const makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
 codecData: actual.codecPrivate,
 codecEnum,
 startInSeconds: startTimeInSeconds,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 if (!(0, get_fps_1.trakBoxContainsVideo)(trakBox)) {
 return {
 type: 'other',
 trackId: tkhdBox.trackId,
-
+originalTimescale: timescaleAndDuration.timescale,
 trakBox,
 startInSeconds: startTimeInSeconds,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 const videoSample = (0, get_sample_aspect_ratio_1.getStsdVideoConfig)(trakBox);
@@ -90,7 +93,7 @@ const makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
 type: 'video',
 trackId: tkhdBox.trackId,
 description: videoDescriptors !== null && videoDescriptors !== void 0 ? videoDescriptors : undefined,
-
+originalTimescale: timescaleAndDuration.timescale,
 codec,
 sampleAspectRatio: (0, get_sample_aspect_ratio_1.getSampleAspectRatio)(trakBox),
 width,
@@ -107,6 +110,7 @@ const makeBaseMediaTrack = (trakBox, startTimeInSeconds) => {
 codecEnum: (0, get_video_codec_from_iso_track_1.getVideoCodecFromIsoTrak)(trakBox),
 fps: (0, get_fps_1.getFpsFromMp4TrakBox)(trakBox),
 startInSeconds: startTimeInSeconds,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 return track;
 };
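In make-track.js every returned track now carries two timescale fields: `originalTimescale` keeps the container's own timescale (`timescaleAndDuration.timescale` for ISO base media), while `timescale` is pinned to `WEBCODECS_TIMESCALE`, the unit samples are expressed in when handed to WebCodecs. The constant's value is not shown in this diff; judging from the mp3 track's hard-coded `originalTimescale: 1000000` it is presumably one million (microseconds), but treat that as an assumption. A sketch of the two fields:

```ts
// Abridged sketch; only the timescale-related fields of a track are shown.
const WEBCODECS_TIMESCALE = 1_000_000; // assumption: microseconds, not confirmed by this diff

type TrackTimescales = {
  originalTimescale: number; // the container's timescale (e.g. the mdhd timescale for ISO BMFF)
  timescale: number;         // always WEBCODECS_TIMESCALE on emitted tracks
};

const example: TrackTimescales = {
  originalTimescale: 30_000, // hypothetical container timescale
  timescale: WEBCODECS_TIMESCALE,
};
```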
package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js

@@ -13,7 +13,7 @@ exports.calculateJumpMarks = void 0;
 // Therefore, we need to emit them to be less than 10 seconds apart
 const MAX_SPREAD_IN_SECONDS = 8;
 const getKey = (samplePositionTrack) => {
-return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.
+return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}`;
 };
 const findBestJump = ({ allSamplesSortedByOffset, visited, progresses, }) => {
 var _a;
@@ -107,8 +107,8 @@ const calculateJumpMarks = (samplePositionTracks, endOfMdat) => {
 addFinalJumpIfNecessary();
 break;
 }
-const timestamp = currentSamplePosition.samplePosition.
-currentSamplePosition.track.
+const timestamp = currentSamplePosition.samplePosition.decodingTimestamp /
+currentSamplePosition.track.originalTimescale;
 progresses[currentSamplePosition.track.trackId] = timestamp;
 const progressValues = Object.values(progresses);
 const maxProgress = Math.max(...progressValues);
package/dist/containers/iso-base-media/mdat/mdat.js

@@ -107,10 +107,10 @@ const parseMdatSection = async (state) => {
 if (iterator.bytesRemaining() < samplesWithIndex.samplePosition.size) {
 return (0, skip_1.makeFetchMoreData)(samplesWithIndex.samplePosition.size - iterator.bytesRemaining());
 }
-const {
-const {
-const cts = rawCts + startInSeconds *
-const dts = rawDts + startInSeconds *
+const { timestamp: rawCts, decodingTimestamp: rawDts, duration, isKeyframe, offset, bigEndian, chunkSize, } = samplesWithIndex.samplePosition;
+const { originalTimescale, startInSeconds } = samplesWithIndex.track;
+const cts = rawCts + startInSeconds * originalTimescale;
+const dts = rawDts + startInSeconds * originalTimescale;
 const bytes = (0, postprocess_bytes_1.postprocessBytes)({
 bytes: iterator.getSlice(samplesWithIndex.samplePosition.size),
 bigEndian,
@@ -122,16 +122,16 @@ const parseMdatSection = async (state) => {
 data: bytes,
 timestamp: cts,
 duration,
-
-dts,
-trackId: samplesWithIndex.track.trackId,
+decodingTimestamp: dts,
 type: isKeyframe ? 'key' : 'delta',
 offset,
-timescale,
 },
-timescale,
+timescale: originalTimescale,
+});
+await state.callbacks.onAudioSample({
+audioSample,
+trackId: samplesWithIndex.track.trackId,
 });
-await state.callbacks.onAudioSample(samplesWithIndex.track.trackId, audioSample);
 }
 if (samplesWithIndex.track.type === 'video') {
 // https://remotion-assets.s3.eu-central-1.amazonaws.com/example-videos/sei_checkpoint.mp4
@@ -151,16 +151,16 @@ const parseMdatSection = async (state) => {
 data: bytes,
 timestamp: cts,
 duration,
-
-dts,
-trackId: samplesWithIndex.track.trackId,
+decodingTimestamp: dts,
 type: isKeyframe && !isRecoveryPoint ? 'key' : 'delta',
 offset,
-timescale,
 },
-timescale,
+timescale: originalTimescale,
+});
+await state.callbacks.onVideoSample({
+videoSample,
+trackId: samplesWithIndex.track.trackId,
 });
-await state.callbacks.onVideoSample(samplesWithIndex.track.trackId, videoSample);
 }
 const jump = jumpMarks.find((j) => j.afterSampleWithOffset === offset);
 if (jump) {
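The mdat hunk above also shows how the raw sample times are adjusted: `timestamp`/`decodingTimestamp` from the sample position are in the track's `originalTimescale`, and the track's `startInSeconds` offset is folded in by adding `startInSeconds * originalTimescale` before the sample is converted with `timescale: originalTimescale`. A small sketch of that step (local helper, not part of the package):

```ts
// Sketch of the timestamp adjustment performed in parseMdatSection.
const adjustTimestamps = ({
  rawCts,
  rawDts,
  startInSeconds,
  originalTimescale,
}: {
  rawCts: number;            // samplePosition.timestamp, in container timescale units
  rawDts: number;            // samplePosition.decodingTimestamp, in container timescale units
  startInSeconds: number;    // track start offset in seconds
  originalTimescale: number; // track.originalTimescale
}) => ({
  cts: rawCts + startInSeconds * originalTimescale,
  dts: rawDts + startInSeconds * originalTimescale,
});

// e.g. rawCts = 3000 at originalTimescale = 1000 with startInSeconds = 2 gives cts = 5000.
```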
package/dist/containers/iso-base-media/parse-icc-profile.js

@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseIccProfile = void 0;
 const buffer_iterator_1 = require("../../iterator/buffer-iterator");
 const parseIccProfile = (data) => {
-const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(data,
+const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(data, data.length);
 const size = iterator.getUint32();
 if (size !== data.length) {
 throw new Error('Invalid ICC profile size');
@@ -52,7 +52,7 @@ const parseIccProfile = (data) => {
 entry.tag === 'gXYZ' ||
 entry.tag === 'bXYZ' ||
 entry.tag === 'wtpt') {
-const it = (0, buffer_iterator_1.getArrayBufferIterator)(found,
+const it = (0, buffer_iterator_1.getArrayBufferIterator)(found, found.length);
 it.discard(4);
 const x = it.getInt32() / 65536;
 const y = it.getInt32() / 65536;
package/dist/containers/m3u/first-sample-in-m3u-chunk.js

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.considerSeekBasedOnChunk = void 0;
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const considerSeekBasedOnChunk = async ({ sample, parentController, childController, callback, m3uState, playlistUrl, subtractChunks, chunkIndex, }) => {
 const pendingSeek = m3uState.getSeekToSecondsToProcess(playlistUrl);
 // If there is not even a seek to consider, just call the callback
@@ -8,7 +9,7 @@ const considerSeekBasedOnChunk = async ({ sample, parentController, childControl
 await callback(sample);
 return;
 }
-const timestamp = Math.min(sample.
+const timestamp = Math.min(sample.decodingTimestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE, sample.timestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE);
 // Already too far, now we should go to the previous chunk
 if (timestamp > pendingSeek.targetTime &&
 chunkIndex !== null &&
package/dist/containers/m3u/sample-sorter.js

@@ -31,7 +31,7 @@ const sampleSorter = ({ logLevel, getAllChunksProcessedForPlaylist, }) => {
 if (!callback) {
 throw new Error('No callback found for audio sample');
 }
-latestSample[src] = sample.
+latestSample[src] = sample.decodingTimestamp;
 await callback(sample);
 },
 addVideoSample: async (src, sample) => {
@@ -39,7+39,7 @@ const sampleSorter = ({ logLevel, getAllChunksProcessedForPlaylist, }) => {
 if (!callback) {
 throw new Error('No callback found for video sample.');
 }
-latestSample[src] = sample.
+latestSample[src] = sample.decodingTimestamp;
 await callback(sample);
 },
 getNextStreamToRun: (streams) => {
package/dist/containers/mp3/parse-mpeg-header.js

@@ -4,6 +4,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseMpegHeader = void 0;
 const log_1 = require("../../log");
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const parse_packet_header_1 = require("./parse-packet-header");
 const parse_xing_1 = require("./parse-xing");
 const audio_sample_from_cbr_1 = require("./seek/audio-sample-from-cbr");
@@ -65,9 +66,10 @@ const parseMpegHeader = async ({ state, }) => {
 description: undefined,
 numberOfChannels,
 sampleRate,
-
+originalTimescale: 1000000,
 trackId: 0,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 },
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,
 tracks: state.callbacks.tracks,
@@ -106,6 +108,9 @@ const parseMpegHeader = async ({ state, }) => {
 offset: initialOffset,
 durationInSeconds,
 });
-await state.callbacks.onAudioSample(
+await state.callbacks.onAudioSample({
+audioSample,
+trackId: 0,
+});
 };
 exports.parseMpegHeader = parseMpegHeader;
package/dist/containers/mp3/seek/audio-sample-from-cbr.js

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getAudioSampleFromCbr = void 0;
+const webcodecs_timescale_1 = require("../../../webcodecs-timescale");
 const get_frame_length_1 = require("../get-frame-length");
 const getAudioSampleFromCbr = ({ bitrateInKbit, initialOffset, layer, sampleRate, samplesPerFrame, data, state, }) => {
 const avgLength = (0, get_frame_length_1.getAverageMpegFrameLength)({
@@ -17,17 +18,14 @@ const getAudioSampleFromCbr = ({ bitrateInKbit, initialOffset, layer, sampleRate
 avgLength);
 const durationInSeconds = samplesPerFrame / sampleRate;
 const timeInSeconds = (nthFrame * samplesPerFrame) / sampleRate;
-const timestamp = Math.round(timeInSeconds *
-const duration = Math.round(durationInSeconds *
+const timestamp = Math.round(timeInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+const duration = Math.round(durationInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
 const audioSample = {
 data,
-
-dts: timestamp,
+decodingTimestamp: timestamp,
 duration,
 offset: initialOffset,
-timescale: 1000000,
 timestamp,
-trackId: 0,
 type: 'key',
 };
 return { audioSample, timeInSeconds, durationInSeconds };
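For CBR MP3, the frame's time is derived purely from the frame index: `timeInSeconds = (nthFrame * samplesPerFrame) / sampleRate`, and both the timestamp and the duration are now rounded into `WEBCODECS_TIMESCALE` units instead of a hard-coded 1000000. A worked example (the timescale value of one million is the same assumption as noted above):

```ts
// Worked example of the CBR timestamp math from the hunk above.
const WEBCODECS_TIMESCALE = 1_000_000; // assumed value

const sampleRate = 44_100;
const samplesPerFrame = 1_152; // MPEG-1 Layer III
const nthFrame = 100;

const durationInSeconds = samplesPerFrame / sampleRate;           // ≈ 0.0261 s per frame
const timeInSeconds = (nthFrame * samplesPerFrame) / sampleRate;  // ≈ 2.6122 s

const timestamp = Math.round(timeInSeconds * WEBCODECS_TIMESCALE);    // 2612245
const duration = Math.round(durationInSeconds * WEBCODECS_TIMESCALE); // 26122
```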
package/dist/containers/mp3/seek/audio-sample-from-vbr.js

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getAudioSampleFromVbr = void 0;
+const webcodecs_timescale_1 = require("../../../webcodecs-timescale");
 const get_duration_1 = require("../get-duration");
 const parse_xing_1 = require("../parse-xing");
 const samples_per_mpeg_file_1 = require("../samples-per-mpeg-file");
@@ -29,17 +30,14 @@ const getAudioSampleFromVbr = ({ info, position, mp3Info, data, }) => {
 tableOfContents: info.xingData.tableOfContents,
 });
 const durationInSeconds = samplesPerFrame / info.xingData.sampleRate;
-const timestamp = Math.round(timeInSeconds *
-const duration = Math.round(durationInSeconds *
+const timestamp = Math.round(timeInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
+const duration = Math.round(durationInSeconds * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
 const audioSample = {
 data,
-
-dts: timestamp,
+decodingTimestamp: timestamp,
 duration,
 offset: position,
-timescale: 1000000,
 timestamp,
-trackId: 0,
 type: 'key',
 };
 return { timeInSeconds, audioSample, durationInSeconds };
package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts

@@ -1,14 +1,15 @@
 import type { ParserState } from '../../state/parser-state';
 import type { QueuedVideoSample } from '../../state/riff/queued-frames';
-export declare const convertQueuedSampleToMediaParserSample: (sample
+export declare const convertQueuedSampleToMediaParserSample: ({ sample, state, trackId, }: {
+sample: QueuedVideoSample;
+state: ParserState;
+trackId: number;
+}) => {
 timestamp: number;
-
-dts: number;
+decodingTimestamp: number;
 type: "key" | "delta";
 data: Uint8Array;
 duration: number | undefined;
-trackId: number;
 offset: number;
-timescale: number;
 avc?: import("../../webcodec-sample-types").MediaParserAvcExtraInfo | undefined;
 };
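The RIFF sample converter now takes a destructured options object instead of positional arguments, and the returned sample exposes `decodingTimestamp` while `trackId` and `timescale` are no longer part of it. A self-contained call-site sketch (the declared types below stand in for the package's internal ones):

```ts
// Stand-in declarations; the real types live in the package's internal dist files.
type QueuedVideoSample = {data: Uint8Array; type: 'key' | 'delta'; offset: number};
type ParserState = unknown;

declare const convertQueuedSampleToMediaParserSample: (args: {
  sample: QueuedVideoSample;
  state: ParserState;
  trackId: number;
}) => {timestamp: number; decodingTimestamp: number};

declare const state: ParserState;
declare const sample: QueuedVideoSample;

// 4.0.303 (old): convertQueuedSampleToMediaParserSample(sample, state)
const converted = convertQueuedSampleToMediaParserSample({sample, state, trackId: 1});
console.log(converted.timestamp, converted.decodingTimestamp);
```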
package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js

@@ -3,11 +3,11 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.convertQueuedSampleToMediaParserSample = void 0;
 const convert_audio_or_video_sample_1 = require("../../convert-audio-or-video-sample");
 const get_strh_for_index_1 = require("./get-strh-for-index");
-const getKeyFrameOffsetAndPocs = ({ state, sample, }) => {
+const getKeyFrameOffsetAndPocs = ({ state, sample, trackId, }) => {
 var _a, _b;
 if (sample.type === 'key') {
 const sampleOffset = state.riff.sampleCounter.getSampleCountForTrack({
-trackId
+trackId,
 });
 return {
 sampleOffsetAtKeyframe: sampleOffset,
@@ -19,7 +19,7 @@ const getKeyFrameOffsetAndPocs = ({ state, sample, }) => {
 if (!keyframeAtOffset) {
 throw new Error('no keyframe at offset');
 }
-const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[
+const sampleOffsetAtKeyframe = keyframeAtOffset.sampleCounts[trackId];
 const pocsAtKeyframeOffset = state.riff.sampleCounter.getPocAtKeyframeOffset({
 keyframeOffset: keyframeAtOffset.positionInBytes,
 });
@@ -28,12 +28,13 @@ const getKeyFrameOffsetAndPocs = ({ state, sample, }) => {
 pocsAtKeyframeOffset,
 };
 };
-const convertQueuedSampleToMediaParserSample = (sample, state) => {
-const strh = (0, get_strh_for_index_1.getStrhForIndex)(state.structure.getRiffStructure(),
+const convertQueuedSampleToMediaParserSample = ({ sample, state, trackId, }) => {
+const strh = (0, get_strh_for_index_1.getStrhForIndex)(state.structure.getRiffStructure(), trackId);
 const samplesPerSecond = strh.rate / strh.scale;
 const { sampleOffsetAtKeyframe, pocsAtKeyframeOffset } = getKeyFrameOffsetAndPocs({
 sample,
 state,
+trackId,
 });
 const indexOfPoc = pocsAtKeyframeOffset.findIndex((poc) => { var _a; return poc === ((_a = sample.avc) === null || _a === void 0 ? void 0 : _a.poc); });
 if (indexOfPoc === -1) {
@@ -45,8 +46,7 @@ const convertQueuedSampleToMediaParserSample = (sample, state) => {
 sample: {
 ...sample,
 timestamp,
-
-dts: timestamp,
+decodingTimestamp: timestamp,
 },
 timescale: 1,
 });
package/dist/containers/riff/get-tracks-from-avi.js

@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.hasAllTracksFromAvi = exports.getTracksFromAvi = exports.makeAviVideoTrack = exports.makeAviAudioTrack = exports.getNumberOfTracks = exports.TO_BE_OVERRIDDEN_LATER = void 0;
 const add_avc_profile_to_track_1 = require("../../add-avc-profile-to-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const timescale_1 = require("./timescale");
 const traversal_1 = require("./traversal");
 exports.TO_BE_OVERRIDDEN_LATER = 'to-be-overriden-later';
@@ -26,9 +27,10 @@ const makeAviAudioTrack = ({ strf, index, }) => {
 description: new Uint8Array([18, 16]),
 numberOfChannels: strf.numberOfChannels,
 sampleRate: strf.sampleRate,
-
+originalTimescale: timescale_1.MEDIA_PARSER_RIFF_TIMESCALE,
 trackId: index,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 };
 exports.makeAviAudioTrack = makeAviAudioTrack;
@@ -46,7 +48,7 @@ const makeAviVideoTrack = ({ strh, strf, index, }) => {
 height: strf.height,
 type: 'video',
 displayAspectHeight: strf.height,
-
+originalTimescale: timescale_1.MEDIA_PARSER_RIFF_TIMESCALE,
 description: undefined,
 m3uStreamFormat: null,
 trackId: index,
@@ -70,6 +72,7 @@ const makeAviVideoTrack = ({ strh, strf, index, }) => {
 },
 fps: strh.rate / strh.scale,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 };
 exports.makeAviVideoTrack = makeAviVideoTrack;