@remotion/media-parser 4.0.303 → 4.0.305
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/aac/parse-aac.js +8 -6
- package/dist/containers/flac/parse-flac-frame.js +6 -6
- package/dist/containers/flac/parse-streaminfo.js +3 -1
- package/dist/containers/iso-base-media/find-keyframe-before-time.js +2 -2
- package/dist/containers/iso-base-media/get-keyframes.js +3 -3
- package/dist/containers/iso-base-media/get-sample-position-bounds.js +2 -2
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +3 -3
- package/dist/containers/iso-base-media/get-seeking-byte.js +1 -1
- package/dist/containers/iso-base-media/make-track.js +7 -3
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +3 -3
- package/dist/containers/iso-base-media/mdat/mdat.js +16 -16
- package/dist/containers/iso-base-media/parse-icc-profile.js +2 -2
- package/dist/containers/m3u/first-sample-in-m3u-chunk.js +2 -1
- package/dist/containers/m3u/sample-sorter.js +2 -2
- package/dist/containers/mp3/parse-mpeg-header.js +7 -2
- package/dist/containers/mp3/seek/audio-sample-from-cbr.js +4 -6
- package/dist/containers/mp3/seek/audio-sample-from-vbr.js +4 -6
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +6 -5
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +7 -7
- package/dist/containers/riff/get-tracks-from-avi.js +5 -2
- package/dist/containers/riff/parse-movi.js +35 -25
- package/dist/containers/riff/parse-riff-body.js +13 -3
- package/dist/containers/riff/seek/fetch-idx1.js +4 -1
- package/dist/containers/transport-stream/handle-aac-packet.js +8 -7
- package/dist/containers/transport-stream/handle-avc-packet.js +8 -7
- package/dist/containers/wav/parse-fmt.js +3 -1
- package/dist/containers/wav/parse-media-section.js +5 -5
- package/dist/containers/webm/get-sample-from-block.d.ts +6 -0
- package/dist/containers/webm/get-sample-from-block.js +9 -8
- package/dist/containers/webm/make-track.js +5 -2
- package/dist/containers/webm/parse-ebml.js +12 -3
- package/dist/containers/webm/seek/seeking-hints.js +1 -1
- package/dist/convert-audio-or-video-sample.js +6 -9
- package/dist/download-and-parse-media.js +3 -0
- package/dist/esm/index.mjs +266 -196
- package/dist/esm/worker-server-entry.mjs +262 -196
- package/dist/esm/worker-web-entry.mjs +262 -196
- package/dist/get-duration.js +2 -2
- package/dist/get-sample-positions-from-mp4.js +2 -2
- package/dist/get-sample-positions.d.ts +2 -2
- package/dist/get-sample-positions.js +2 -2
- package/dist/get-tracks.d.ts +6 -3
- package/dist/index.d.ts +24 -5
- package/dist/index.js +3 -1
- package/dist/internal-parse-media.js +3 -0
- package/dist/iterator/buffer-manager.js +1 -1
- package/dist/parse-media.js +3 -0
- package/dist/samples-from-moof.js +2 -2
- package/dist/state/iso-base-media/cached-sample-positions.d.ts +2 -2
- package/dist/state/parser-state.d.ts +23 -5
- package/dist/state/riff/queued-frames.d.ts +14 -3
- package/dist/state/riff/queued-frames.js +3 -3
- package/dist/state/riff/sample-counter.d.ts +4 -1
- package/dist/state/riff/sample-counter.js +8 -7
- package/dist/state/riff.d.ts +15 -3
- package/dist/state/sample-callbacks.d.ts +8 -2
- package/dist/state/sample-callbacks.js +5 -4
- package/dist/state/samples-observed/slow-duration-fps.js +7 -6
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/webcodec-sample-types.d.ts +3 -9
- package/dist/webcodecs-timescale.d.ts +1 -0
- package/dist/webcodecs-timescale.js +4 -0
- package/dist/worker-server.js +2 -1
- package/package.json +3 -3
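
Most of the hunks below share one theme: a new webcodecs-timescale module is required throughout, track objects gain an `originalTimescale` field plus a `timescale` set to `WEBCODECS_TIMESCALE`, and sample objects rename `dts` to `decodingTimestamp` while dropping their per-sample `trackId` and `timescale` fields — the track id now travels in the callback argument object instead. The value of the constant is not visible in this diff; the sketch below is an assumption based on the WAV hunk (where a literal 1000000 becomes `originalTimescale`) and on WebCodecs expressing timestamps in microseconds.

```ts
// Assumed contents of the new package/dist/webcodecs-timescale.js (source form) —
// only the require sites are visible in this diff, not the value itself.
export const WEBCODECS_TIMESCALE = 1_000_000; // microseconds per second
```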

package/dist/containers/riff/parse-movi.js

@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseMovi = exports.handleChunk = void 0;
-const
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const key_1 = require("../avc/key");
 const parse_avc_1 = require("../avc/parse-avc");
 const convert_queued_sample_to_mediaparser_sample_1 = require("./convert-queued-sample-to-mediaparser-sample");
@@ -31,10 +31,8 @@ const handleChunk = async ({ state, ckId, ckSize, }) => {
 // this sample would be longer. Chrome will pad it with silence.
 // If we'd pass a duration instead, it would shift the audio and we think that audio is not finished
 duration: 1 / samplesPerSecond,
-trackId,
 type: keyOrDelta === 'bidirectional' ? 'delta' : keyOrDelta,
 offset,
-timescale: samplesPerSecond,
 avc: info,
 };
 const maxFramesInBuffer = state.avc.getMaxFramesInBuffer();
@@ -51,14 +49,29 @@ const handleChunk = async ({ state, ckId, ckSize, }) => {
 poc: info.poc,
 });
 }
-state.riff.queuedBFrames.addFrame(
+state.riff.queuedBFrames.addFrame({
+frame: rawSample,
+trackId,
+maxFramesInBuffer,
+timescale: samplesPerSecond,
+});
 const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
 if (!releasedFrame) {
 return;
 }
-const videoSample = (0, convert_queued_sample_to_mediaparser_sample_1.convertQueuedSampleToMediaParserSample)(
-
-
+const videoSample = (0, convert_queued_sample_to_mediaparser_sample_1.convertQueuedSampleToMediaParserSample)({
+sample: releasedFrame.sample,
+state,
+trackId: releasedFrame.trackId,
+});
+state.riff.sampleCounter.onVideoSample({
+trackId,
+videoSample,
+});
+await state.callbacks.onVideoSample({
+videoSample,
+trackId,
+});
 }
 const audioChunk = ckId.match(/^([0-9]{2})wb$/);
 if (audioChunk) {
@@ -73,28 +86,25 @@ const handleChunk = async ({ state, ckId, ckSize, }) => {
 trackId,
 });
 const timeInSec = nthSample / samplesPerSecond;
-const timestamp = timeInSec;
+const timestamp = Math.round(timeInSec * webcodecs_timescale_1.WEBCODECS_TIMESCALE);
 const data = iterator.getSlice(ckSize);
-const audioSample =
-
-
-
-
-
-
-
-
-
-type: 'key',
-offset,
-timescale: samplesPerSecond,
-},
-timescale: 1,
-});
+const audioSample = {
+decodingTimestamp: timestamp,
+data, // We must also NOT pass a duration because if the the next sample is 0,
+// this sample would be longer. Chrome will pad it with silence.
+// If we'd pass a duration instead, it would shift the audio and we think that audio is not finished
+duration: undefined,
+timestamp,
+type: 'key',
+offset,
+};
 state.riff.sampleCounter.onAudioSample(trackId, audioSample);
 // In example.avi, we have samples with 0 data
 // Chrome fails on these
-await state.callbacks.onAudioSample(
+await state.callbacks.onAudioSample({
+audioSample,
+trackId,
+});
 }
 };
 exports.handleChunk = handleChunk;
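
The AVI audio path above previously emitted `timestamp = timeInSec` in seconds and tagged the sample with `timescale: samplesPerSecond`; it now rounds the time directly into the WebCodecs timescale. A quick check of that arithmetic, assuming `WEBCODECS_TIMESCALE` is 1,000,000 as sketched above:

```ts
// Illustrative numbers; variable names follow the hunk above.
const WEBCODECS_TIMESCALE = 1_000_000; // assumed value
const samplesPerSecond = 44_100;
const nthSample = 22_050; // half a second into the stream

const timeInSec = nthSample / samplesPerSecond; // 0.5
const timestamp = Math.round(timeInSec * WEBCODECS_TIMESCALE); // 500_000 µs
```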

package/dist/containers/riff/parse-riff-body.js

@@ -10,9 +10,19 @@ const parse_video_section_1 = require("./parse-video-section");
 const parseRiffBody = async (state) => {
 const releasedFrame = state.riff.queuedBFrames.getReleasedFrame();
 if (releasedFrame) {
-const converted = (0, convert_queued_sample_to_mediaparser_sample_1.convertQueuedSampleToMediaParserSample)(
-
-
+const converted = (0, convert_queued_sample_to_mediaparser_sample_1.convertQueuedSampleToMediaParserSample)({
+sample: releasedFrame.sample,
+state,
+trackId: releasedFrame.trackId,
+});
+state.riff.sampleCounter.onVideoSample({
+trackId: releasedFrame.trackId,
+videoSample: converted,
+});
+await state.callbacks.onVideoSample({
+videoSample: converted,
+trackId: releasedFrame.trackId,
+});
 return null;
 }
 if (state.mediaSection.isCurrentByteInMediaSection(state.iterator) ===

package/dist/containers/riff/seek/fetch-idx1.js

@@ -13,7 +13,10 @@ const fetchIdx1 = async ({ src, readerInterface, controller, position, logLevel,
 logLevel,
 prefetchCache,
 });
-
+if (result.contentLength === null) {
+throw new Error('Content length is null');
+}
+const iterator = (0, buffer_iterator_1.getArrayBufferIterator)(new Uint8Array(), result.contentLength - position + 1);
 while (true) {
 const res = await result.reader.reader.read();
 if (res.value) {
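
fetchIdx1 now refuses to continue when the reader cannot report a content length, since that length is needed to size the array-buffer iterator covering everything from `position` to the end of the file (the `+ 1` presumably keeps the bound inclusive). Illustrative numbers for that sizing:

```ts
// Hypothetical figures — only the size expression is taken from the hunk above.
const contentLength = 1_000_000; // total byte count reported by the reader
const position = 999_000; // offset at which the idx1 scan starts
const initialBufferSize = contentLength - position + 1; // 1_001 bytes
console.log(initialBufferSize);
```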

package/dist/containers/transport-stream/handle-aac-packet.js

@@ -4,6 +4,7 @@ exports.handleAacPacket = void 0;
 const aac_codecprivate_1 = require("../../aac-codecprivate");
 const convert_audio_or_video_sample_1 = require("../../convert-audio-or-video-sample");
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const adts_header_1 = require("./adts-header");
 const handle_avc_packet_1 = require("./handle-avc-packet");
 const handleAacPacket = async ({ streamBuffer, programId, offset, sampleCallbacks, logLevel, onAudioTrack, transportStream, makeSamplesStartAtZero, }) => {
@@ -28,7 +29,7 @@ const handleAacPacket = async ({ streamBuffer, programId, offset, sampleCallback
 type: 'audio',
 codecData: { type: 'aac-config', data: codecPrivate },
 trackId: programId,
-
+originalTimescale: handle_avc_packet_1.MPEG_TIMESCALE,
 codecEnum: 'aac',
 codec: (0, aac_codecprivate_1.mapAudioObjectTypeToCodecString)(audioObjectType),
 // https://www.w3.org/TR/webcodecs-aac-codec-registration/
@@ -38,6 +39,7 @@ const handleAacPacket = async ({ streamBuffer, programId, offset, sampleCallback
 numberOfChannels: channelConfiguration,
 sampleRate,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 await (0, register_track_1.registerAudioTrack)({
 track,
@@ -49,24 +51,23 @@ const handleAacPacket = async ({ streamBuffer, programId, offset, sampleCallback
 });
 }
 const sample = {
-
-transportStream.startOffset.getOffset(programId),
-dts: ((_b = streamBuffer.pesHeader.dts) !== null && _b !== void 0 ? _b : streamBuffer.pesHeader.pts) -
+decodingTimestamp: ((_b = streamBuffer.pesHeader.dts) !== null && _b !== void 0 ? _b : streamBuffer.pesHeader.pts) -
 transportStream.startOffset.getOffset(programId),
 timestamp: streamBuffer.pesHeader.pts -
 transportStream.startOffset.getOffset(programId),
 duration: undefined,
 data: streamBuffer.getBuffer(),
-trackId: programId,
 type: 'key',
 offset,
-timescale: handle_avc_packet_1.MPEG_TIMESCALE,
 };
 const audioSample = (0, convert_audio_or_video_sample_1.convertAudioOrVideoSampleToWebCodecsTimestamps)({
 sample,
 timescale: handle_avc_packet_1.MPEG_TIMESCALE,
 });
-await sampleCallbacks.onAudioSample(
+await sampleCallbacks.onAudioSample({
+audioSample,
+trackId: programId,
+});
 transportStream.lastEmittedSample.setLastEmittedSample(sample);
 };
 exports.handleAacPacket = handleAacPacket;
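
In both transport-stream handlers the sample loses its `dts`, `trackId` and `timescale` fields: the decoding time is renamed `decodingTimestamp`, the source timescale is only passed to `convertAudioOrVideoSampleToWebCodecsTimestamps`, and the track id becomes part of the callback's argument object. A minimal sketch of the new call shape, with the parser's internal types reduced to stand-ins:

```ts
// Stand-in type — the real MediaParserAudioSample has more fields.
type AudioSample = {
  decodingTimestamp: number; // was `dts` in 4.0.303
  timestamp: number;
  duration: number | undefined;
  data: Uint8Array;
  type: 'key' | 'delta';
  offset: number;
};

// Stand-in for the parser's sample callbacks: the sample and the track id now
// arrive together in one argument object instead of the id living on the sample.
const sampleCallbacks = {
  onAudioSample: async ({audioSample, trackId}: {audioSample: AudioSample; trackId: number}) => {
    console.log(trackId, audioSample.decodingTimestamp);
  },
};

const programId = 257; // hypothetical PID of the AAC elementary stream

await sampleCallbacks.onAudioSample({
  audioSample: {
    decodingTimestamp: 4_500,
    timestamp: 4_500,
    duration: undefined,
    data: new Uint8Array(),
    type: 'key',
    offset: 0,
  },
  trackId: programId,
});
```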

package/dist/containers/transport-stream/handle-avc-packet.js

@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.handleAvcPacket = exports.MPEG_TIMESCALE = void 0;
 const convert_audio_or_video_sample_1 = require("../../convert-audio-or-video-sample");
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const codec_string_1 = require("../avc/codec-string");
 const create_sps_pps_data_1 = require("../avc/create-sps-pps-data");
 const interpret_sps_1 = require("../avc/interpret-sps");
@@ -35,7 +36,7 @@ const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallback
 rotation: 0,
 trackId: programId,
 type: 'video',
-
+originalTimescale: exports.MPEG_TIMESCALE,
 codec: (0, codec_string_1.getCodecStringFromSpsAndPps)(spsAndPps.sps),
 codecData: { type: 'avc-sps-pps', data: codecPrivate },
 fps: null,
@@ -57,6 +58,7 @@ const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallback
 colorSpace: (0, color_to_webcodecs_colors_1.mediaParserAdvancedColorToWebCodecsColor)(advancedColor),
 advancedColor,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 await (0, register_track_1.registerVideoTrack)({
 track,
@@ -70,18 +72,14 @@ const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallback
 const type = (0, key_1.getKeyFrameOrDeltaFromAvcInfo)(avc);
 // sample for webcodecs needs to be in nano seconds
 const sample = {
-
-transportStream.startOffset.getOffset(programId),
-dts: ((_b = streamBuffer.pesHeader.dts) !== null && _b !== void 0 ? _b : streamBuffer.pesHeader.pts) -
+decodingTimestamp: ((_b = streamBuffer.pesHeader.dts) !== null && _b !== void 0 ? _b : streamBuffer.pesHeader.pts) -
 transportStream.startOffset.getOffset(programId),
 timestamp: streamBuffer.pesHeader.pts -
 transportStream.startOffset.getOffset(programId),
 duration: undefined,
 data: streamBuffer.getBuffer(),
-trackId: programId,
 type: type === 'bidirectional' ? 'delta' : type,
 offset,
-timescale: exports.MPEG_TIMESCALE,
 };
 if (type === 'key') {
 transportStream.observedPesHeaders.markPtsAsKeyframe(streamBuffer.pesHeader.pts);
@@ -90,7 +88,10 @@ const handleAvcPacket = async ({ streamBuffer, programId, offset, sampleCallback
 sample,
 timescale: exports.MPEG_TIMESCALE,
 });
-await sampleCallbacks.onVideoSample(
+await sampleCallbacks.onVideoSample({
+videoSample,
+trackId: programId,
+});
 transportStream.lastEmittedSample.setLastEmittedSample(sample);
 };
 exports.handleAvcPacket = handleAvcPacket;

package/dist/containers/wav/parse-fmt.js

@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseFmt = void 0;
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const parseFmt = async ({ state, }) => {
 const { iterator } = state;
 const ckSize = iterator.getUint32Le(); // chunkSize
@@ -43,9 +44,10 @@ const parseFmt = async ({ state, }) => {
 codecEnum: format,
 numberOfChannels,
 sampleRate,
-
+originalTimescale: 1000000,
 trackId: 0,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 },
 container: 'wav',
 registerAudioSampleCallback: state.callbacks.registerAudioSampleCallback,

package/dist/containers/wav/parse-media-section.js

@@ -20,19 +20,19 @@ const parseMediaSection = async ({ state, }) => {
 const data = iterator.getSlice(toRead);
 const audioSample = (0, convert_audio_or_video_sample_1.convertAudioOrVideoSampleToWebCodecsTimestamps)({
 sample: {
-
-dts: timestamp,
+decodingTimestamp: timestamp,
 data,
 duration,
 timestamp,
-trackId: 0,
 type: 'key',
 offset,
-timescale: 1000000,
 },
 timescale: 1,
 });
-await state.callbacks.onAudioSample(
+await state.callbacks.onAudioSample({
+audioSample,
+trackId: 0,
+});
 return null;
 };
 exports.parseMediaSection = parseMediaSection;

package/dist/containers/webm/get-sample-from-block.d.ts

@@ -8,12 +8,18 @@ import type { BlockSegment, SimpleBlockSegment } from './segments/all-segments';
 type SampleResult = {
 type: 'video-sample';
 videoSample: MediaParserVideoSample;
+trackId: number;
+timescale: number;
 } | {
 type: 'audio-sample';
 audioSample: MediaParserAudioSample;
+trackId: number;
+timescale: number;
 } | {
 type: 'partial-video-sample';
 partialVideoSample: Omit<MediaParserVideoSample, 'type'>;
+trackId: number;
+timescale: number;
 } | {
 type: 'no-sample';
 };
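
Every sample-carrying variant of the internal `SampleResult` union now also reports `trackId` and `timescale`, which is what lets parse-ebml.js (further below) forward the track id to the callbacks without reading it off the sample. A reduced sketch of how such a discriminated union narrows — the payload types are simplified and this type is not part of the public API:

```ts
// Reduced copy for illustration; the real union uses MediaParserVideoSample etc.
type SampleResult =
  | {type: 'video-sample'; videoSample: unknown; trackId: number; timescale: number}
  | {type: 'audio-sample'; audioSample: unknown; trackId: number; timescale: number}
  | {type: 'partial-video-sample'; partialVideoSample: unknown; trackId: number; timescale: number}
  | {type: 'no-sample'};

const describe = (result: SampleResult): string => {
  if (result.type === 'no-sample') {
    return 'no sample';
  }
  // After the narrowing above, trackId and timescale exist on every remaining variant.
  return `${result.type} on track ${result.trackId} (timescale ${result.timescale})`;
};
```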

package/dist/containers/webm/get-sample-from-block.js

@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.getSampleFromBlock = void 0;
 const buffer_iterator_1 = require("../../iterator/buffer-iterator");
 const register_track_1 = require("../../register-track");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const parse_avc_1 = require("../avc/parse-avc");
 const get_ready_tracks_1 = require("./get-ready-tracks");
 const all_segments_1 = require("./segments/all-segments");
@@ -75,19 +76,18 @@ const getSampleFromBlock = async ({ ebml, webmState, offset, structureState, cal
 if (codec.startsWith('V_')) {
 const partialVideoSample = {
 data: iterator.getSlice(remainingNow),
-
-dts: timecodeInMicroseconds,
+decodingTimestamp: timecodeInMicroseconds,
 duration: undefined,
-trackId: trackNumber,
 timestamp: timecodeInMicroseconds,
 offset,
-timescale,
 };
 if (keyframe === null) {
 iterator.destroy();
 return {
 type: 'partial-video-sample',
 partialVideoSample,
+trackId: trackNumber,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 await addAvcToTrackAndActivateTrackIfNecessary({
@@ -109,24 +109,25 @@ const getSampleFromBlock = async ({ ebml, webmState, offset, structureState, cal
 return {
 type: 'video-sample',
 videoSample: sample,
+trackId: trackNumber,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 if (codec.startsWith('A_')) {
 const audioSample = {
 data: iterator.getSlice(remainingNow),
-trackId: trackNumber,
 timestamp: timecodeInMicroseconds,
 type: 'key',
 duration: undefined,
-
-dts: timecodeInMicroseconds,
+decodingTimestamp: timecodeInMicroseconds,
 offset,
-timescale,
 };
 iterator.destroy();
 return {
 type: 'audio-sample',
 audioSample,
+trackId: trackNumber,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 iterator.destroy();

package/dist/containers/webm/make-track.js

@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.getTrack = exports.getMatroskaAudioCodecEnum = exports.NO_CODEC_PRIVATE_SHOULD_BE_DERIVED_FROM_SPS = void 0;
 const buffer_iterator_1 = require("../../iterator/buffer-iterator");
 const make_hvc1_codec_strings_1 = require("../../make-hvc1-codec-strings");
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const color_to_webcodecs_colors_1 = require("../iso-base-media/color-to-webcodecs-colors");
 const av1_codec_private_1 = require("./av1-codec-private");
 const color_1 = require("./color");
@@ -247,7 +248,7 @@ const getTrack = ({ timescale, track, }) => {
 numerator: 1,
 denominator: 1,
 },
-timescale,
+originalTimescale: timescale,
 codedHeight: height.value.value,
 codedWidth: width.value.value,
 displayAspectHeight: displayHeight
@@ -263,6 +264,7 @@ const getTrack = ({ timescale, track, }) => {
 codecEnum,
 fps: null,
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 if ((0, track_entry_1.trackTypeToString)(trackType.value.value) === 'audio') {
@@ -277,7 +279,7 @@ const getTrack = ({ timescale, track, }) => {
 type: 'audio',
 trackId,
 codec: codecString,
-timescale,
+originalTimescale: timescale,
 numberOfChannels,
 sampleRate,
 description: (0, description_1.getAudioDescription)(track),
@@ -290,6 +292,7 @@ const getTrack = ({ timescale, track, }) => {
 track,
 }),
 startInSeconds: 0,
+timescale: webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 };
 }
 return null;

package/dist/containers/webm/parse-ebml.js

@@ -158,7 +158,10 @@ const postprocessEbml = async ({ offset, ebml, statesForProcessing: { webmState,
 avcState,
 });
 if (sample.type === 'video-sample') {
-await callbacks.onVideoSample(
+await callbacks.onVideoSample({
+videoSample: sample.videoSample,
+trackId: sample.trackId,
+});
 return {
 type: 'Block',
 value: new Uint8Array([]),
@@ -166,7 +169,10 @@ const postprocessEbml = async ({ offset, ebml, statesForProcessing: { webmState,
 };
 }
 if (sample.type === 'audio-sample') {
-await callbacks.onAudioSample(
+await callbacks.onAudioSample({
+audioSample: sample.audioSample,
+trackId: sample.trackId,
+});
 return {
 type: 'Block',
 value: new Uint8Array([]),
@@ -208,7 +214,10 @@ const postprocessEbml = async ({ offset, ebml, statesForProcessing: { webmState,
 ...sample.partialVideoSample,
 type: hasReferenceBlock ? 'delta' : 'key',
 };
-await callbacks.onVideoSample(
+await callbacks.onVideoSample({
+videoSample: completeFrame,
+trackId: sample.trackId,
+});
 }
 return {
 type: 'BlockGroup',

package/dist/containers/webm/seek/seeking-hints.js

@@ -10,7 +10,7 @@ const getSeekingHintsFromMatroska = (tracksState, keyframesState, webmState) =>
 type: 'webm-seeking-hints',
 track: firstVideoTrack
 ? {
-timescale: firstVideoTrack.
+timescale: firstVideoTrack.originalTimescale,
 trackId: firstVideoTrack.trackId,
 }
 : null,

package/dist/convert-audio-or-video-sample.js

@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.convertAudioOrVideoSampleToWebCodecsTimestamps = void 0;
-const
+const webcodecs_timescale_1 = require("./webcodecs-timescale");
 const fixFloat = (value) => {
 if (value % 1 < 0.0000001) {
 return Math.floor(value);
@@ -12,22 +12,19 @@ const fixFloat = (value) => {
 return value;
 };
 const convertAudioOrVideoSampleToWebCodecsTimestamps = ({ sample, timescale, }) => {
-if (timescale ===
+if (timescale === webcodecs_timescale_1.WEBCODECS_TIMESCALE) {
 return sample;
 }
-const {
+const { decodingTimestamp: dts, timestamp } = sample;
 return {
-
-
-timestamp: fixFloat(timestamp * (TARGET_TIMESCALE / timescale)),
+decodingTimestamp: fixFloat(dts * (webcodecs_timescale_1.WEBCODECS_TIMESCALE / timescale)),
+timestamp: fixFloat(timestamp * (webcodecs_timescale_1.WEBCODECS_TIMESCALE / timescale)),
 duration: sample.duration === undefined
 ? undefined
-: fixFloat(sample.duration * (
+: fixFloat(sample.duration * (webcodecs_timescale_1.WEBCODECS_TIMESCALE / timescale)),
 data: sample.data,
-trackId: sample.trackId,
 type: sample.type,
 offset: sample.offset,
-timescale: TARGET_TIMESCALE,
 ...('avc' in sample ? { avc: sample.avc } : {}),
 };
 };
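
The converter now rescales `decodingTimestamp` instead of `dts` and no longer copies `trackId`/`timescale` onto the result, but the math is unchanged: each time value is multiplied by `WEBCODECS_TIMESCALE / timescale`, and `fixFloat` floors values whose fractional part is below 1e-7. A worked example, assuming the constant is 1,000,000 and the input uses a 90,000 Hz clock (typical for MPEG-TS; neither value is shown directly in this diff):

```ts
// Mirrors the visible branch of fixFloat and the scaling from the hunk above.
const WEBCODECS_TIMESCALE = 1_000_000; // assumed value
const fixFloat = (value: number): number =>
  value % 1 < 0.0000001 ? Math.floor(value) : value;

const timescale = 90_000; // e.g. an MPEG-TS 90 kHz clock
const decodingTimestamp = 180_000; // two seconds, expressed in 90 kHz ticks

const converted = fixFloat(decodingTimestamp * (WEBCODECS_TIMESCALE / timescale));
console.log(converted); // 2_000_000 µs = 2 s
```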

package/dist/download-and-parse-media.js

@@ -7,6 +7,9 @@ const log_1 = require("./log");
 const web_1 = require("./web");
 const downloadAndParseMedia = async (options) => {
 var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16;
+if (!options) {
+return Promise.reject(new Error('No options provided. See https://www.remotion.dev/media-parser for how to get started.'));
+}
 const logLevel = (_a = options.logLevel) !== null && _a !== void 0 ? _a : 'info';
 const content = await options.writer.createContent({
 filename: 'hmm',