@remotion/media-parser 4.0.304 → 4.0.305
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/aac/parse-aac.js +8 -6
- package/dist/containers/flac/parse-flac-frame.js +6 -6
- package/dist/containers/flac/parse-streaminfo.js +3 -1
- package/dist/containers/iso-base-media/find-keyframe-before-time.js +2 -2
- package/dist/containers/iso-base-media/get-keyframes.js +3 -3
- package/dist/containers/iso-base-media/get-sample-position-bounds.js +2 -2
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +3 -3
- package/dist/containers/iso-base-media/get-seeking-byte.js +1 -1
- package/dist/containers/iso-base-media/make-track.js +7 -3
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +3 -3
- package/dist/containers/iso-base-media/mdat/mdat.js +16 -16
- package/dist/containers/iso-base-media/parse-icc-profile.js +2 -2
- package/dist/containers/m3u/first-sample-in-m3u-chunk.js +2 -1
- package/dist/containers/m3u/sample-sorter.js +2 -2
- package/dist/containers/mp3/parse-mpeg-header.js +7 -2
- package/dist/containers/mp3/seek/audio-sample-from-cbr.js +4 -6
- package/dist/containers/mp3/seek/audio-sample-from-vbr.js +4 -6
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.d.ts +6 -5
- package/dist/containers/riff/convert-queued-sample-to-mediaparser-sample.js +7 -7
- package/dist/containers/riff/get-tracks-from-avi.js +5 -2
- package/dist/containers/riff/parse-movi.js +35 -25
- package/dist/containers/riff/parse-riff-body.js +13 -3
- package/dist/containers/riff/seek/fetch-idx1.js +4 -1
- package/dist/containers/transport-stream/handle-aac-packet.js +8 -7
- package/dist/containers/transport-stream/handle-avc-packet.js +8 -7
- package/dist/containers/wav/parse-fmt.js +3 -1
- package/dist/containers/wav/parse-media-section.js +5 -5
- package/dist/containers/webm/get-sample-from-block.d.ts +6 -0
- package/dist/containers/webm/get-sample-from-block.js +9 -8
- package/dist/containers/webm/make-track.js +5 -2
- package/dist/containers/webm/parse-ebml.js +12 -3
- package/dist/containers/webm/seek/seeking-hints.js +1 -1
- package/dist/convert-audio-or-video-sample.js +6 -9
- package/dist/download-and-parse-media.js +3 -0
- package/dist/esm/index.mjs +266 -196
- package/dist/esm/worker-server-entry.mjs +262 -196
- package/dist/esm/worker-web-entry.mjs +262 -196
- package/dist/get-duration.js +2 -2
- package/dist/get-sample-positions-from-mp4.js +2 -2
- package/dist/get-sample-positions.d.ts +2 -2
- package/dist/get-sample-positions.js +2 -2
- package/dist/get-tracks.d.ts +6 -3
- package/dist/index.d.ts +24 -5
- package/dist/index.js +3 -1
- package/dist/internal-parse-media.js +3 -0
- package/dist/iterator/buffer-manager.js +1 -1
- package/dist/parse-media.js +3 -0
- package/dist/samples-from-moof.js +2 -2
- package/dist/state/iso-base-media/cached-sample-positions.d.ts +2 -2
- package/dist/state/parser-state.d.ts +23 -5
- package/dist/state/riff/queued-frames.d.ts +14 -3
- package/dist/state/riff/queued-frames.js +3 -3
- package/dist/state/riff/sample-counter.d.ts +4 -1
- package/dist/state/riff/sample-counter.js +8 -7
- package/dist/state/riff.d.ts +15 -3
- package/dist/state/sample-callbacks.d.ts +8 -2
- package/dist/state/sample-callbacks.js +5 -4
- package/dist/state/samples-observed/slow-duration-fps.js +7 -6
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/webcodec-sample-types.d.ts +3 -9
- package/dist/webcodecs-timescale.d.ts +1 -0
- package/dist/webcodecs-timescale.js +4 -0
- package/dist/worker-server.js +2 -1
- package/package.json +3 -3
package/dist/get-duration.js
CHANGED
@@ -73,7 +73,7 @@ const getDurationFromIsoBaseMedia = (parserState) => {
 }
 const tracks = (0, get_tracks_1.getTracks)(parserState, true);
 const allSamples = tracks.map((t) => {
-const {
+const { originalTimescale: ts } = t;
 const trakBox = (0, traversal_1.getTrakBoxByTrackId)(moovBox, t.trackId);
 if (!trakBox) {
 return null;
@@ -89,7 +89,7 @@ const getDurationFromIsoBaseMedia = (parserState) => {
 if (samplePositions.length === 0) {
 return null;
 }
-const highest = samplePositions === null || samplePositions === void 0 ? void 0 : samplePositions.map((sp) => (sp.
+const highest = samplePositions === null || samplePositions === void 0 ? void 0 : samplePositions.map((sp) => (sp.timestamp + sp.duration) / ts).reduce((a, b) => Math.max(a, b), 0);
 return highest !== null && highest !== void 0 ? highest : 0;
 });
 if (allSamples.every((s) => s === null)) {
package/dist/get-sample-positions-from-mp4.js
CHANGED

@@ -37,8 +37,8 @@ const getGroupedSamplesPositionsFromMp4 = ({ trakBox, bigEndian, }) => {
 }
 samples.push({
 chunk,
-
-
+timestamp,
+decodingTimestamp: timestamp,
 offset: Number(entry),
 size: stszBox.sampleSize * samplesPerChunk,
 duration: samplesPerChunk,
package/dist/get-sample-positions.js
CHANGED

@@ -41,8 +41,8 @@ const getSamplePositions = ({ stcoBox, stszBox, stscBox, stssBox, sttsBox, cttsB
 offset: Number(chunks[i]) + offsetInThisChunk,
 size,
 isKeyframe,
-dts,
-cts,
+decodingTimestamp: dts,
+timestamp: cts,
 duration: delta,
 chunk: i,
 bigEndian: false,
package/dist/get-tracks.d.ts
CHANGED
@@ -35,7 +35,8 @@ export type MediaParserVideoTrack = {
 height: number;
 rotation: number;
 fps: number | null;
-timescale:
+timescale: 1000000;
+originalTimescale: number;
 advancedColor: MediaParserAdvancedColor;
 m3uStreamFormat: 'ts' | 'mp4' | null;
 startInSeconds: number;
@@ -48,14 +49,16 @@ export type MediaParserAudioTrack = {
 type: 'audio';
 trackId: number;
 codecEnum: MediaParserAudioCodec;
-timescale:
+timescale: 1000000;
+originalTimescale: number;
 codecData: MediaParserCodecData | null;
 startInSeconds: number;
 };
 export type MediaParserOtherTrack = {
 type: 'other';
 trackId: number;
-timescale:
+timescale: 1000000;
+originalTimescale: number;
 trakBox: TrakBox | null;
 startInSeconds: number;
 };
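Tracks now always report `timescale: 1000000` (the WebCodecs timescale) and carry the container's own value in the new `originalTimescale` field. A minimal consumption sketch, assuming the public `parseMedia`/`onVideoTrack` callback shape and a placeholder URL:

```ts
import {parseMedia} from '@remotion/media-parser';

await parseMedia({
  src: 'https://example.com/video.mp4', // placeholder
  acknowledgeRemotionLicense: true,
  onVideoTrack: ({track}) => {
    // Fixed WebCodecs timescale used for all sample timestamps and durations
    console.log(track.timescale); // 1000000
    // The container's original timescale, still available when needed
    console.log(track.originalTimescale);
    return null; // do not subscribe to samples of this track
  },
});
```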
package/dist/index.d.ts
CHANGED
@@ -778,9 +778,18 @@ export declare const MediaParserInternals: {
 registerOnAvcProfileCallback: (callback: (profile: import("./state/parser-state").SpsAndPps) => Promise<void>) => void;
 getNextTrackIndex: () => number;
 queuedBFrames: {
-addFrame: (frame
+addFrame: ({ frame, maxFramesInBuffer, trackId, timescale, }: {
+frame: import("./state/riff/queued-frames").QueuedVideoSample;
+trackId: number;
+maxFramesInBuffer: number;
+timescale: number;
+}) => void;
 flush: () => void;
-getReleasedFrame: () =>
+getReleasedFrame: () => {
+sample: import("./state/riff/queued-frames").QueuedVideoSample;
+trackId: number;
+timescale: number;
+} | null;
 hasReleasedFrames: () => boolean;
 clear: () => void;
 };
@@ -806,7 +815,10 @@ export declare const MediaParserInternals: {
 };
 sampleCounter: {
 onAudioSample: (trackId: number, audioSample: import("./webcodec-sample-types").MediaParserAudioSample) => void;
-onVideoSample: (videoSample:
+onVideoSample: ({ trackId, videoSample, }: {
+videoSample: import("./webcodec-sample-types").MediaParserVideoSample;
+trackId: number;
+}) => void;
 getSampleCountForTrack: ({ trackId }: {
 trackId: number;
 }) => number;
@@ -1014,8 +1026,14 @@ export declare const MediaParserInternals: {
 };
 callbacks: {
 registerVideoSampleCallback: (id: number, callback: import("./webcodec-sample-types").MediaParserOnVideoSample | null) => Promise<void>;
-onAudioSample: (
-
+onAudioSample: ({ audioSample, trackId, }: {
+trackId: number;
+audioSample: import("./webcodec-sample-types").MediaParserAudioSample;
+}) => Promise<void>;
+onVideoSample: ({ trackId, videoSample, }: {
+trackId: number;
+videoSample: import("./webcodec-sample-types").MediaParserVideoSample;
+}) => Promise<void>;
 canSkipTracksState: {
 doFieldsNeedTracks: () => boolean;
 canSkipTracks: () => boolean;
@@ -1255,6 +1273,7 @@ export { M3uAssociatedPlaylist, M3uStream } from './containers/m3u/get-streams';
 export { defaultSelectM3uAssociatedPlaylists, defaultSelectM3uStreamFn, SelectM3uAssociatedPlaylistsFn, SelectM3uStreamFn, SelectM3uStreamFnOptions, } from './containers/m3u/select-stream';
 export { mediaParserController, MediaParserController, } from './controller/media-parser-controller';
 export { VERSION } from './version';
+export { WEBCODECS_TIMESCALE } from './webcodecs-timescale';
 export type { MediaParserSampleAspectRatio } from './get-tracks';
 /**
 * @deprecated Dont use these yet.
package/dist/index.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.VERSION = exports.mediaParserController = exports.defaultSelectM3uStreamFn = exports.defaultSelectM3uAssociatedPlaylists = exports.MediaParserInternals = exports.downloadAndParseMedia = exports.MediaParserAbortError = exports.IsAPdfError = exports.IsAnUnsupportedFileTypeError = exports.IsAnImageError = exports.hasBeenAborted = exports.parseMedia = void 0;
+exports.WEBCODECS_TIMESCALE = exports.VERSION = exports.mediaParserController = exports.defaultSelectM3uStreamFn = exports.defaultSelectM3uAssociatedPlaylists = exports.MediaParserInternals = exports.downloadAndParseMedia = exports.MediaParserAbortError = exports.IsAPdfError = exports.IsAnUnsupportedFileTypeError = exports.IsAnImageError = exports.hasBeenAborted = exports.parseMedia = void 0;
 const aac_codecprivate_1 = require("./aac-codecprivate");
 const ftyp_1 = require("./containers/iso-base-media/ftyp");
 const mvhd_1 = require("./containers/iso-base-media/mvhd");
@@ -50,3 +50,5 @@ var media_parser_controller_1 = require("./controller/media-parser-controller");
 Object.defineProperty(exports, "mediaParserController", { enumerable: true, get: function () { return media_parser_controller_1.mediaParserController; } });
 var version_1 = require("./version");
 Object.defineProperty(exports, "VERSION", { enumerable: true, get: function () { return version_1.VERSION; } });
+var webcodecs_timescale_1 = require("./webcodecs-timescale");
+Object.defineProperty(exports, "WEBCODECS_TIMESCALE", { enumerable: true, get: function () { return webcodecs_timescale_1.WEBCODECS_TIMESCALE; } });
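The new named export lets consumers convert timestamps without hard-coding the constant. A small sketch, importing from the public package entry point:

```ts
import {WEBCODECS_TIMESCALE} from '@remotion/media-parser';

// WEBCODECS_TIMESCALE is 1_000_000: sample timestamps are in microseconds.
const seconds = 2_500_000 / WEBCODECS_TIMESCALE; // 2.5
```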
package/dist/internal-parse-media.js
CHANGED

@@ -13,6 +13,9 @@ const parser_state_1 = require("./state/parser-state");
 const throttled_progress_1 = require("./throttled-progress");
 const internalParseMedia = async function ({ src, fields: _fieldsInReturnValue, reader: readerInterface, onAudioTrack, onVideoTrack, controller = (0, media_parser_controller_1.mediaParserController)(), logLevel, onParseProgress: onParseProgressDoNotCallDirectly, progressIntervalInMs, mode, onDiscardedData, onError, acknowledgeRemotionLicense, apiName, selectM3uStream: selectM3uStreamFn, selectM3uAssociatedPlaylists: selectM3uAssociatedPlaylistsFn, m3uPlaylistContext, makeSamplesStartAtZero, seekingHints, ...more }) {
 var _a;
+if (!src) {
+throw new Error('No "src" provided');
+}
 controller._internals.markAsReadyToEmitEvents();
 (0, remotion_license_acknowledge_1.warnIfRemotionLicenseNotAcknowledged)({
 acknowledgeRemotionLicense,
package/dist/iterator/buffer-manager.js
CHANGED

@@ -5,7 +5,7 @@ const bufferManager = ({ initialData, maxBytes, counter, }) => {
 const buf = new ArrayBuffer(initialData.byteLength, {
 maxByteLength: maxBytes === null
 ? initialData.byteLength
-: Math.min(maxBytes, 2 **
+: Math.min(maxBytes, 2 ** 31),
 });
 if (!buf.resize) {
 throw new Error('`ArrayBuffer.resize` is not supported in this Runtime. On the server: Use at least Node.js 20 or Bun. In the browser: Chrome 111, Edge 111, Safari 16.4, Firefox 128, Opera 111');
package/dist/parse-media.js
CHANGED
@@ -6,6 +6,9 @@ const internal_parse_media_1 = require("./internal-parse-media");
 const web_1 = require("./web");
 const parseMedia = (options) => {
 var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16;
+if (!options) {
+return Promise.reject(new Error('No options provided. See https://www.remotion.dev/media-parser for how to get started.'));
+}
 return (0, internal_parse_media_1.internalParseMedia)({
 fields: (_a = options.fields) !== null && _a !== void 0 ? _a : null,
 logLevel: (_b = options.logLevel) !== null && _b !== void 0 ? _b : 'info',
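`parseMedia` now rejects immediately when called without an options object rather than failing later. A minimal call sketch using option names visible in this diff; the URL is a placeholder and the `fields` shape follows the documented fields API:

```ts
import {parseMedia} from '@remotion/media-parser';

const {durationInSeconds} = await parseMedia({
  src: 'https://example.com/video.mp4', // placeholder
  fields: {durationInSeconds: true},
  acknowledgeRemotionLicense: true,
});

console.log(durationInSeconds);
```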
package/dist/samples-from-moof.js
CHANGED

@@ -46,8 +46,8 @@ const getSamplesFromTraf = (trafSegment, moofOffset) => {
 const dts = time + ((_f = tfdtBox === null || tfdtBox === void 0 ? void 0 : tfdtBox.baseMediaDecodeTime) !== null && _f !== void 0 ? _f : 0);
 const samplePosition = {
 offset: offset + (moofOffset !== null && moofOffset !== void 0 ? moofOffset : 0) + (dataOffset !== null && dataOffset !== void 0 ? dataOffset : 0),
-dts,
-
+decodingTimestamp: dts,
+timestamp: dts + ((_g = sample.sampleCompositionTimeOffset) !== null && _g !== void 0 ? _g : 0),
 duration,
 isKeyframe: keyframe,
 size,
package/dist/state/iso-base-media/cached-sample-positions.d.ts
CHANGED

@@ -9,11 +9,11 @@ export type FlatSample = {
 export type MinimalFlatSampleForTesting = {
 track: {
 trackId: number;
-
+originalTimescale: number;
 type: 'audio' | 'video' | 'other';
 };
 samplePosition: {
-
+decodingTimestamp: number;
 offset: number;
 };
 };
package/dist/state/parser-state.d.ts
CHANGED

@@ -46,9 +46,18 @@ export declare const makeParserState: ({ hasAudioTrackHandlers, hasVideoTrackHan
 registerOnAvcProfileCallback: (callback: (profile: SpsAndPps) => Promise<void>) => void;
 getNextTrackIndex: () => number;
 queuedBFrames: {
-addFrame: (frame
+addFrame: ({ frame, maxFramesInBuffer, trackId, timescale, }: {
+frame: import("./riff/queued-frames").QueuedVideoSample;
+trackId: number;
+maxFramesInBuffer: number;
+timescale: number;
+}) => void;
 flush: () => void;
-getReleasedFrame: () =>
+getReleasedFrame: () => {
+sample: import("./riff/queued-frames").QueuedVideoSample;
+trackId: number;
+timescale: number;
+} | null;
 hasReleasedFrames: () => boolean;
 clear: () => void;
 };
@@ -74,7 +83,10 @@ export declare const makeParserState: ({ hasAudioTrackHandlers, hasVideoTrackHan
 };
 sampleCounter: {
 onAudioSample: (trackId: number, audioSample: import("../webcodec-sample-types").MediaParserAudioSample) => void;
-onVideoSample: (videoSample:
+onVideoSample: ({ trackId, videoSample, }: {
+videoSample: import("../webcodec-sample-types").MediaParserVideoSample;
+trackId: number;
+}) => void;
 getSampleCountForTrack: ({ trackId }: {
 trackId: number;
 }) => number;
@@ -282,8 +294,14 @@ export declare const makeParserState: ({ hasAudioTrackHandlers, hasVideoTrackHan
 };
 callbacks: {
 registerVideoSampleCallback: (id: number, callback: import("../webcodec-sample-types").MediaParserOnVideoSample | null) => Promise<void>;
-onAudioSample: (
-
+onAudioSample: ({ audioSample, trackId, }: {
+trackId: number;
+audioSample: import("../webcodec-sample-types").MediaParserAudioSample;
+}) => Promise<void>;
+onVideoSample: ({ trackId, videoSample, }: {
+trackId: number;
+videoSample: import("../webcodec-sample-types").MediaParserVideoSample;
+}) => Promise<void>;
 canSkipTracksState: {
 doFieldsNeedTracks: () => boolean;
 canSkipTracks: () => boolean;
package/dist/state/riff/queued-frames.d.ts
CHANGED

@@ -1,9 +1,20 @@
 import type { MediaParserVideoSample } from '../../webcodec-sample-types';
-export type QueuedVideoSample = Omit<MediaParserVideoSample, '
+export type QueuedVideoSample = Omit<MediaParserVideoSample, 'decodingTimestamp' | 'timestamp'>;
+type QueueItem = {
+sample: QueuedVideoSample;
+trackId: number;
+timescale: number;
+};
 export declare const queuedBFramesState: () => {
-addFrame: (frame
+addFrame: ({ frame, maxFramesInBuffer, trackId, timescale, }: {
+frame: QueuedVideoSample;
+trackId: number;
+maxFramesInBuffer: number;
+timescale: number;
+}) => void;
 flush: () => void;
-getReleasedFrame: () =>
+getReleasedFrame: () => QueueItem | null;
 hasReleasedFrames: () => boolean;
 clear: () => void;
 };
+export {};
package/dist/state/riff/queued-frames.js
CHANGED

@@ -9,13 +9,13 @@ const queuedBFramesState = () => {
 queuedFrames.length = 0;
 };
 return {
-addFrame: (frame, maxFramesInBuffer) => {
+addFrame: ({ frame, maxFramesInBuffer, trackId, timescale, }) => {
 if (frame.type === 'key') {
 flush();
-releasedFrames.push(frame);
+releasedFrames.push({ sample: frame, trackId, timescale });
 return;
 }
-queuedFrames.push(frame);
+queuedFrames.push({ sample: frame, trackId, timescale });
 if (queuedFrames.length > maxFramesInBuffer) {
 releasedFrames.push(queuedFrames.shift());
 }
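Queued B-frames are now stored together with the trackId and timescale they belong to. A self-contained sketch of the buffering behavior visible above; this is an illustration under assumptions, not the package's implementation, and QueuedVideoSample is reduced to a minimal assumed shape:

```ts
// Sketch: keyframes drain the buffer, delta frames queue up and are released
// oldest-first once the buffer exceeds maxFramesInBuffer.
type QueuedVideoSample = {type: 'key' | 'delta'; data: Uint8Array}; // assumed minimal shape
type QueueItem = {sample: QueuedVideoSample; trackId: number; timescale: number};

const queuedBFramesSketch = () => {
  const queuedFrames: QueueItem[] = [];
  const releasedFrames: QueueItem[] = [];

  const flush = () => {
    // Release everything still buffered, oldest first.
    releasedFrames.push(...queuedFrames);
    queuedFrames.length = 0;
  };

  return {
    addFrame: ({frame, maxFramesInBuffer, trackId, timescale}: {
      frame: QueuedVideoSample;
      maxFramesInBuffer: number;
      trackId: number;
      timescale: number;
    }) => {
      if (frame.type === 'key') {
        // A keyframe ends the reordering window: drain the buffer, then release it.
        flush();
        releasedFrames.push({sample: frame, trackId, timescale});
        return;
      }
      queuedFrames.push({sample: frame, trackId, timescale});
      if (queuedFrames.length > maxFramesInBuffer) {
        releasedFrames.push(queuedFrames.shift() as QueueItem);
      }
    },
    flush,
    getReleasedFrame: (): QueueItem | null => releasedFrames.shift() ?? null,
    hasReleasedFrames: () => releasedFrames.length > 0,
    clear: () => {
      queuedFrames.length = 0;
      releasedFrames.length = 0;
    },
  };
};
```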
package/dist/state/riff/sample-counter.d.ts
CHANGED

@@ -2,7 +2,10 @@ import type { MediaParserAudioSample, MediaParserVideoSample } from '../../webco
 import type { QueuedVideoSample } from './queued-frames';
 export declare const riffSampleCounter: () => {
 onAudioSample: (trackId: number, audioSample: MediaParserAudioSample) => void;
-onVideoSample: (videoSample:
+onVideoSample: ({ trackId, videoSample, }: {
+videoSample: MediaParserVideoSample;
+trackId: number;
+}) => void;
 getSampleCountForTrack: ({ trackId }: {
 trackId: number;
 }) => number;
package/dist/state/riff/sample-counter.js
CHANGED

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.riffSampleCounter = void 0;
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const riff_keyframes_1 = require("./riff-keyframes");
 const riffSampleCounter = () => {
 const samplesForTrack = {};
@@ -16,22 +17,22 @@ const riffSampleCounter = () => {
 }
 samplesForTrack[trackId]++;
 };
-const onVideoSample = (videoSample) => {
-if (typeof samplesForTrack[
-samplesForTrack[
+const onVideoSample = ({ trackId, videoSample, }) => {
+if (typeof samplesForTrack[trackId] === 'undefined') {
+samplesForTrack[trackId] = 0;
 }
 if (videoSample.type === 'key') {
 riffKeys.addKeyframe({
-trackId
-decodingTimeInSeconds: videoSample.
+trackId,
+decodingTimeInSeconds: videoSample.decodingTimestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 positionInBytes: videoSample.offset,
-presentationTimeInSeconds: videoSample.
+presentationTimeInSeconds: videoSample.timestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 sizeInBytes: videoSample.data.length,
 sampleCounts: { ...samplesForTrack },
 });
 }
 if (videoSample.data.length > 0) {
-samplesForTrack[
+samplesForTrack[trackId]++;
 }
 };
 const getSampleCountForTrack = ({ trackId }) => {
package/dist/state/riff.d.ts
CHANGED
@@ -17,9 +17,18 @@ export declare const riffSpecificState: ({ controller, logLevel, readerInterface
 registerOnAvcProfileCallback: (callback: AvcProfileInfoCallback) => void;
 getNextTrackIndex: () => number;
 queuedBFrames: {
-addFrame: (frame
+addFrame: ({ frame, maxFramesInBuffer, trackId, timescale, }: {
+frame: import("./riff/queued-frames").QueuedVideoSample;
+trackId: number;
+maxFramesInBuffer: number;
+timescale: number;
+}) => void;
 flush: () => void;
-getReleasedFrame: () =>
+getReleasedFrame: () => {
+sample: import("./riff/queued-frames").QueuedVideoSample;
+trackId: number;
+timescale: number;
+} | null;
 hasReleasedFrames: () => boolean;
 clear: () => void;
 };
@@ -45,7 +54,10 @@ export declare const riffSpecificState: ({ controller, logLevel, readerInterface
 };
 sampleCounter: {
 onAudioSample: (trackId: number, audioSample: import("..").MediaParserAudioSample) => void;
-onVideoSample: (videoSample:
+onVideoSample: ({ trackId, videoSample, }: {
+videoSample: import("..").MediaParserVideoSample;
+trackId: number;
+}) => void;
 getSampleCountForTrack: ({ trackId }: {
 trackId: number;
 }) => number;
package/dist/state/sample-callbacks.d.ts
CHANGED

@@ -21,8 +21,14 @@ export declare const callbacksState: ({ controller, hasAudioTrackHandlers, hasVi
 logLevel: MediaParserLogLevel;
 }) => {
 registerVideoSampleCallback: (id: number, callback: MediaParserOnVideoSample | null) => Promise<void>;
-onAudioSample: (
-
+onAudioSample: ({ audioSample, trackId, }: {
+trackId: number;
+audioSample: MediaParserAudioSample;
+}) => Promise<void>;
+onVideoSample: ({ trackId, videoSample, }: {
+trackId: number;
+videoSample: MediaParserVideoSample;
+}) => Promise<void>;
 canSkipTracksState: {
 doFieldsNeedTracks: () => boolean;
 canSkipTracks: () => boolean;
package/dist/state/sample-callbacks.js
CHANGED

@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.callbacksState = void 0;
 const log_1 = require("../log");
+const webcodecs_timescale_1 = require("../webcodecs-timescale");
 const can_skip_tracks_1 = require("./can-skip-tracks");
 const has_tracks_section_1 = require("./has-tracks-section");
 const need_samples_for_fields_1 = require("./need-samples-for-fields");
@@ -30,7 +31,7 @@ const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandle
 }
 queuedVideoSamples[id] = [];
 },
-onAudioSample: async (trackId,
+onAudioSample: async ({ audioSample, trackId, }) => {
 if (controller._internals.signal.aborted) {
 throw new Error('Aborted');
 }
@@ -50,7 +51,7 @@ const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandle
 samplesObserved.addAudioSample(audioSample);
 }
 },
-onVideoSample: async (trackId, videoSample) => {
+onVideoSample: async ({ trackId, videoSample, }) => {
 if (controller._internals.signal.aborted) {
 throw new Error('Aborted');
 }
@@ -69,9 +70,9 @@ const callbacksState = ({ controller, hasAudioTrackHandlers, hasVideoTrackHandle
 if (videoSample.type === 'key') {
 keyframes.addKeyframe({
 trackId,
-decodingTimeInSeconds: videoSample.
+decodingTimeInSeconds: videoSample.decodingTimestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 positionInBytes: videoSample.offset,
-presentationTimeInSeconds: videoSample.
+presentationTimeInSeconds: videoSample.timestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE,
 sizeInBytes: videoSample.data.length,
 });
 }
package/dist/state/samples-observed/slow-duration-fps.js
CHANGED

@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.samplesObservedState = void 0;
+const webcodecs_timescale_1 = require("../../webcodecs-timescale");
 const samplesObservedState = () => {
 let smallestVideoSample;
 let largestVideoSample;
@@ -25,9 +26,9 @@ const samplesObservedState = () => {
 };
 const addVideoSample = (videoSample) => {
 var _a;
-videoSamples.set(videoSample.
-const presentationTimeInSeconds = videoSample.
-const duration = ((_a = videoSample.duration) !== null && _a !== void 0 ? _a : 0) /
+videoSamples.set(videoSample.timestamp, videoSample.data.byteLength);
+const presentationTimeInSeconds = videoSample.timestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE;
+const duration = ((_a = videoSample.duration) !== null && _a !== void 0 ? _a : 0) / webcodecs_timescale_1.WEBCODECS_TIMESCALE;
 if (largestVideoSample === undefined ||
 presentationTimeInSeconds > largestVideoSample) {
 largestVideoSample = presentationTimeInSeconds + duration;
@@ -39,9 +40,9 @@ const samplesObservedState = () => {
 };
 const addAudioSample = (audioSample) => {
 var _a;
-audioSamples.set(audioSample.
-const presentationTimeInSeconds = audioSample.
-const duration = ((_a = audioSample.duration) !== null && _a !== void 0 ? _a : 0) /
+audioSamples.set(audioSample.timestamp, audioSample.data.byteLength);
+const presentationTimeInSeconds = audioSample.timestamp / webcodecs_timescale_1.WEBCODECS_TIMESCALE;
+const duration = ((_a = audioSample.duration) !== null && _a !== void 0 ? _a : 0) / webcodecs_timescale_1.WEBCODECS_TIMESCALE;
 if (largestAudioSample === undefined ||
 presentationTimeInSeconds > largestAudioSample) {
 largestAudioSample = presentationTimeInSeconds + duration;
package/dist/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const VERSION = "4.0.
+export declare const VERSION = "4.0.305";
package/dist/version.js
CHANGED

package/dist/webcodec-sample-types.d.ts
CHANGED

@@ -16,12 +16,9 @@ export type MediaParserAudioSample = {
 data: Uint8Array;
 timestamp: number;
 duration: number | undefined;
-trackId: number;
 type: 'key' | 'delta';
-
-dts: number;
+decodingTimestamp: number;
 offset: number;
-timescale: number;
 };
 export type MediaParserAvcKeyframeInfo = {
 type: 'keyframe';
@@ -36,12 +33,9 @@ export type MediaParserAvcExtraInfo = MediaParserAvcKeyframeInfo | MediaParserAv
 export type MediaParserVideoSample = {
 data: Uint8Array;
 timestamp: number;
-duration: number | undefined;
-trackId: number;
 type: 'key' | 'delta';
-
-
+duration: number | undefined;
+decodingTimestamp: number;
 offset: number;
-timescale: number;
 avc?: MediaParserAvcExtraInfo;
 };

package/dist/webcodecs-timescale.d.ts
ADDED

@@ -0,0 +1 @@
+export declare const WEBCODECS_TIMESCALE = 1000000;
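Samples no longer carry `trackId`, `timescale` or `dts`; instead `timestamp` and the new `decodingTimestamp` are expressed in the fixed WebCodecs timescale. A sketch of converting a received video sample to seconds, with the callback shape assumed from the public API and a placeholder URL:

```ts
import {parseMedia, WEBCODECS_TIMESCALE} from '@remotion/media-parser';

await parseMedia({
  src: 'https://example.com/video.mp4', // placeholder
  acknowledgeRemotionLicense: true,
  onVideoTrack: () => (sample) => {
    // Both fields are in the fixed WebCodecs timescale (microseconds).
    const presentationTimeInSeconds = sample.timestamp / WEBCODECS_TIMESCALE;
    const decodeTimeInSeconds = sample.decodingTimestamp / WEBCODECS_TIMESCALE;
    console.log(presentationTimeInSeconds, decodeTimeInSeconds, sample.type);
  },
});
```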
package/dist/worker-server.js
CHANGED
@@ -12,7 +12,8 @@ const post = (message) => {
 };
 const controller = (0, media_parser_controller_1.mediaParserController)();
 const executeCallback = (payload) => {
-
+// safari doesn't support crypto.randomUUID()
+const nonce = String(Math.random());
 const { promise, resolve, reject } = (0, with_resolvers_1.withResolvers)();
 const cb = (msg) => {
 const data = msg.data;
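Per the new inline comment, the worker nonce is now generated with Math.random() because crypto.randomUUID() is not supported in Safari. If UUID-quality nonces were wanted, a feature-detected helper along these lines would be one alternative (hypothetical, not part of the package):

```ts
// Hypothetical helper: prefer crypto.randomUUID() where it exists, otherwise fall back.
const makeNonce = (): string =>
  typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function'
    ? crypto.randomUUID()
    : String(Math.random());
```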
package/package.json
CHANGED
@@ -3,15 +3,15 @@
 "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-parser"
 },
 "name": "@remotion/media-parser",
-"version": "4.0.
+"version": "4.0.305",
 "main": "dist/index.js",
 "sideEffects": false,
 "devDependencies": {
 "@types/wicg-file-system-access": "2023.10.5",
 "eslint": "9.19.0",
 "@types/bun": "1.2.8",
-"@remotion/example-videos": "4.0.
-"@remotion/eslint-config-internal": "4.0.
+"@remotion/example-videos": "4.0.305",
+"@remotion/eslint-config-internal": "4.0.305"
 },
 "publishConfig": {
 "access": "public"