@remotion/media-parser 4.0.230 → 4.0.232
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/add-avc-profile-to-track.d.ts +3 -0
- package/dist/add-avc-profile-to-track.js +35 -0
- package/dist/add-new-matroska-tracks.d.ts +6 -1
- package/dist/add-new-matroska-tracks.js +16 -1
- package/dist/boxes/avc/parse-avc.d.ts +18 -0
- package/dist/boxes/avc/parse-avc.js +96 -0
- package/dist/boxes/iso-base-media/make-track.js +3 -3
- package/dist/boxes/iso-base-media/mdat/mdat.d.ts +2 -2
- package/dist/boxes/iso-base-media/mdat/mdat.js +5 -2
- package/dist/boxes/iso-base-media/moov/moov.js +2 -2
- package/dist/boxes/iso-base-media/process-box.d.ts +5 -5
- package/dist/boxes/iso-base-media/process-box.js +38 -37
- package/dist/boxes/iso-base-media/stsd/mebx.js +2 -2
- package/dist/boxes/iso-base-media/stsd/samples.d.ts +2 -2
- package/dist/boxes/iso-base-media/stsd/samples.js +9 -9
- package/dist/boxes/iso-base-media/trak/trak.js +2 -2
- package/dist/boxes/iso-base-media/traversal.d.ts +1 -1
- package/dist/boxes/riff/expect-riff-box.d.ts +16 -0
- package/dist/boxes/riff/expect-riff-box.js +49 -0
- package/dist/boxes/riff/get-tracks-from-avi.d.ts +21 -0
- package/dist/boxes/riff/get-tracks-from-avi.js +108 -0
- package/dist/boxes/riff/is-movi.d.ts +2 -0
- package/dist/boxes/riff/is-movi.js +12 -0
- package/dist/boxes/riff/parse-avih.d.ts +6 -0
- package/dist/boxes/riff/parse-avih.js +32 -0
- package/dist/boxes/riff/parse-box.d.ts +13 -0
- package/dist/boxes/riff/parse-box.js +113 -0
- package/dist/boxes/riff/parse-fmt-box.d.ts +7 -0
- package/dist/boxes/riff/parse-fmt-box.js +33 -0
- package/dist/boxes/riff/parse-list-box.d.ts +8 -0
- package/dist/boxes/riff/parse-list-box.js +30 -0
- package/dist/boxes/riff/parse-movi.d.ts +17 -0
- package/dist/boxes/riff/parse-movi.js +122 -0
- package/dist/boxes/riff/parse-riff-box.d.ts +10 -0
- package/dist/boxes/riff/parse-riff-box.js +33 -0
- package/dist/boxes/riff/parse-strf.d.ts +7 -0
- package/dist/boxes/riff/parse-strf.js +67 -0
- package/dist/boxes/riff/parse-strh.d.ts +6 -0
- package/dist/boxes/riff/parse-strh.js +46 -0
- package/dist/boxes/riff/riff-box.d.ts +81 -0
- package/dist/boxes/riff/riff-box.js +2 -0
- package/dist/boxes/riff/strf.d.ts +7 -0
- package/dist/boxes/riff/strf.js +67 -0
- package/dist/boxes/riff/timescale.d.ts +1 -0
- package/dist/boxes/riff/timescale.js +4 -0
- package/dist/boxes/riff/traversal.d.ts +8 -0
- package/dist/boxes/riff/traversal.js +36 -0
- package/dist/boxes/webm/parse-ebml.js +2 -2
- package/dist/boxes/webm/parse-webm-header.d.ts +2 -2
- package/dist/boxes/webm/parse-webm-header.js +23 -4
- package/dist/boxes/webm/segments/parse-children.d.ts +12 -7
- package/dist/boxes/webm/segments/parse-children.js +67 -57
- package/dist/boxes/webm/segments.d.ts +8 -3
- package/dist/boxes/webm/segments.js +70 -39
- package/dist/boxes/webm/traversal.d.ts +2 -2
- package/dist/buffer-iterator.d.ts +6 -1
- package/dist/buffer-iterator.js +24 -5
- package/dist/create/iso-base-media/create-iso-base-media.d.ts +1 -1
- package/dist/create/iso-base-media/create-iso-base-media.js +4 -9
- package/dist/create/matroska/cluster.d.ts +7 -1
- package/dist/create/matroska/cluster.js +8 -5
- package/dist/create/matroska/create-matroska-media.d.ts +1 -1
- package/dist/create/matroska/create-matroska-media.js +27 -14
- package/dist/create/media-fn.d.ts +1 -1
- package/dist/create/mp3/create-mp3.d.ts +2 -0
- package/dist/create/mp3/create-mp3.js +49 -0
- package/dist/create/wav/create-wav.d.ts +2 -0
- package/dist/create/wav/create-wav.js +108 -0
- package/dist/emit-available-info.d.ts +2 -2
- package/dist/emit-available-info.js +28 -13
- package/dist/esm/buffer.mjs +2 -2
- package/dist/esm/from-node.mjs +2 -1
- package/dist/esm/index.mjs +1513 -331
- package/dist/esm/web-fs.mjs +2 -2
- package/dist/get-audio-codec.d.ts +3 -3
- package/dist/get-audio-codec.js +1 -6
- package/dist/get-container.d.ts +3 -3
- package/dist/get-container.js +9 -7
- package/dist/get-dimensions.d.ts +3 -3
- package/dist/get-duration.d.ts +8 -3
- package/dist/get-duration.js +37 -15
- package/dist/get-fps.d.ts +3 -3
- package/dist/get-fps.js +36 -2
- package/dist/get-tracks.d.ts +4 -7
- package/dist/get-tracks.js +55 -27
- package/dist/get-video-codec.d.ts +5 -4
- package/dist/get-video-codec.js +39 -15
- package/dist/has-all-info.d.ts +2 -2
- package/dist/has-all-info.js +9 -9
- package/dist/index.d.ts +5 -3
- package/dist/index.js +5 -1
- package/dist/options.d.ts +17 -10
- package/dist/parse-media.js +43 -14
- package/dist/parse-result.d.ts +35 -6
- package/dist/parse-video.d.ts +3 -3
- package/dist/parse-video.js +8 -16
- package/dist/parser-context.d.ts +1 -0
- package/dist/parser-state.d.ts +11 -0
- package/dist/parser-state.js +30 -0
- package/dist/readers/from-node.js +2 -1
- package/dist/readers/reader.d.ts +2 -2
- package/dist/register-track.d.ts +13 -0
- package/dist/register-track.js +25 -0
- package/dist/version.d.ts +1 -0
- package/dist/version.js +5 -0
- package/dist/writers/buffer-implementation/writer.d.ts +2 -2
- package/dist/writers/buffer-implementation/writer.js +2 -2
- package/dist/writers/web-fs.js +2 -3
- package/dist/writers/writer.d.ts +5 -3
- package/package.json +3 -3
|
@@ -3,26 +3,44 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.expectSegment = void 0;
|
|
4
4
|
const parse_ebml_1 = require("./parse-ebml");
|
|
5
5
|
const parse_children_1 = require("./segments/parse-children");
|
|
6
|
-
const
|
|
7
|
-
if (
|
|
8
|
-
throw new Error(
|
|
6
|
+
const continueAfterMatroskaParseResult = async ({ result, iterator, parserContext, segment, }) => {
|
|
7
|
+
if (result.status === 'done') {
|
|
8
|
+
throw new Error('Should not continue after done');
|
|
9
9
|
}
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
10
|
+
const proceeded = await result.continueParsing();
|
|
11
|
+
if (proceeded.status === 'done') {
|
|
12
|
+
return {
|
|
13
|
+
status: 'done',
|
|
14
|
+
segment,
|
|
15
|
+
};
|
|
16
|
+
}
|
|
17
|
+
return {
|
|
18
|
+
continueParsing() {
|
|
19
|
+
return continueAfterMatroskaParseResult({
|
|
20
|
+
result: proceeded,
|
|
21
|
+
iterator,
|
|
22
|
+
parserContext,
|
|
23
|
+
segment,
|
|
24
|
+
});
|
|
25
|
+
},
|
|
26
|
+
segment: null,
|
|
27
|
+
status: 'incomplete',
|
|
28
|
+
};
|
|
15
29
|
};
|
|
16
|
-
const expectSegment = async (iterator, parserContext) => {
|
|
17
|
-
|
|
30
|
+
const expectSegment = async ({ iterator, parserContext, offset, children, }) => {
|
|
31
|
+
iterator.counter.decrement(iterator.counter.getOffset() - offset);
|
|
18
32
|
if (iterator.bytesRemaining() === 0) {
|
|
19
33
|
return {
|
|
20
34
|
status: 'incomplete',
|
|
21
|
-
segments: [],
|
|
22
35
|
continueParsing: () => {
|
|
23
|
-
return
|
|
36
|
+
return (0, parse_children_1.expectAndProcessSegment)({
|
|
37
|
+
iterator,
|
|
38
|
+
parserContext,
|
|
39
|
+
offset,
|
|
40
|
+
children,
|
|
41
|
+
});
|
|
24
42
|
},
|
|
25
|
-
|
|
43
|
+
segment: null,
|
|
26
44
|
};
|
|
27
45
|
}
|
|
28
46
|
const segmentId = iterator.getMatroskaSegmentId();
|
|
@@ -30,11 +48,15 @@ const expectSegment = async (iterator, parserContext) => {
|
|
|
30
48
|
iterator.counter.decrement(iterator.counter.getOffset() - offset);
|
|
31
49
|
return {
|
|
32
50
|
status: 'incomplete',
|
|
33
|
-
segments: [],
|
|
34
51
|
continueParsing: () => {
|
|
35
|
-
return
|
|
52
|
+
return (0, parse_children_1.expectAndProcessSegment)({
|
|
53
|
+
iterator,
|
|
54
|
+
parserContext,
|
|
55
|
+
offset,
|
|
56
|
+
children,
|
|
57
|
+
});
|
|
36
58
|
},
|
|
37
|
-
|
|
59
|
+
segment: null,
|
|
38
60
|
};
|
|
39
61
|
}
|
|
40
62
|
const offsetBeforeVInt = iterator.counter.getOffset();
|
|
@@ -44,43 +66,43 @@ const expectSegment = async (iterator, parserContext) => {
|
|
|
44
66
|
iterator.counter.decrement(iterator.counter.getOffset() - offset);
|
|
45
67
|
return {
|
|
46
68
|
status: 'incomplete',
|
|
47
|
-
segments: [],
|
|
48
69
|
continueParsing: () => {
|
|
49
|
-
return
|
|
70
|
+
return (0, exports.expectSegment)({ iterator, parserContext, offset, children });
|
|
50
71
|
},
|
|
51
|
-
|
|
72
|
+
segment: null,
|
|
52
73
|
};
|
|
53
74
|
}
|
|
54
75
|
const bytesRemainingNow = iterator.byteLength() - iterator.counter.getOffset();
|
|
55
76
|
if (segmentId === '0x18538067' || segmentId === '0x1f43b675') {
|
|
77
|
+
const newSegment = {
|
|
78
|
+
type: segmentId === '0x18538067' ? 'Segment' : 'Cluster',
|
|
79
|
+
minVintWidth: offsetAfterVInt - offsetBeforeVInt,
|
|
80
|
+
value: [],
|
|
81
|
+
};
|
|
56
82
|
const main = await (0, parse_children_1.expectChildren)({
|
|
57
83
|
iterator,
|
|
58
84
|
length,
|
|
59
|
-
|
|
60
|
-
wrap: segmentId === '0x18538067'
|
|
61
|
-
? (s) => ({
|
|
62
|
-
type: 'Segment',
|
|
63
|
-
value: s,
|
|
64
|
-
minVintWidth: offsetAfterVInt - offsetBeforeVInt,
|
|
65
|
-
})
|
|
66
|
-
: (s) => ({
|
|
67
|
-
type: 'Cluster',
|
|
68
|
-
value: s,
|
|
69
|
-
minVintWidth: offsetAfterVInt - offsetBeforeVInt,
|
|
70
|
-
}),
|
|
85
|
+
children: newSegment.value,
|
|
71
86
|
parserContext,
|
|
87
|
+
startOffset: iterator.counter.getOffset(),
|
|
72
88
|
});
|
|
73
89
|
if (main.status === 'incomplete') {
|
|
74
90
|
return {
|
|
75
91
|
status: 'incomplete',
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
92
|
+
continueParsing: () => {
|
|
93
|
+
return continueAfterMatroskaParseResult({
|
|
94
|
+
iterator,
|
|
95
|
+
parserContext,
|
|
96
|
+
result: main,
|
|
97
|
+
segment: newSegment,
|
|
98
|
+
});
|
|
99
|
+
},
|
|
100
|
+
segment: newSegment,
|
|
79
101
|
};
|
|
80
102
|
}
|
|
81
103
|
return {
|
|
82
104
|
status: 'done',
|
|
83
|
-
|
|
105
|
+
segment: newSegment,
|
|
84
106
|
};
|
|
85
107
|
}
|
|
86
108
|
if (bytesRemainingNow < length) {
|
|
@@ -88,11 +110,10 @@ const expectSegment = async (iterator, parserContext) => {
|
|
|
88
110
|
iterator.counter.decrement(bytesRead);
|
|
89
111
|
return {
|
|
90
112
|
status: 'incomplete',
|
|
91
|
-
|
|
113
|
+
segment: null,
|
|
92
114
|
continueParsing: () => {
|
|
93
|
-
return
|
|
115
|
+
return (0, exports.expectSegment)({ iterator, parserContext, offset, children });
|
|
94
116
|
},
|
|
95
|
-
skipTo: null,
|
|
96
117
|
};
|
|
97
118
|
}
|
|
98
119
|
const segment = await parseSegment({
|
|
@@ -104,7 +125,17 @@ const expectSegment = async (iterator, parserContext) => {
|
|
|
104
125
|
});
|
|
105
126
|
return {
|
|
106
127
|
status: 'done',
|
|
107
|
-
|
|
128
|
+
segment,
|
|
108
129
|
};
|
|
109
130
|
};
|
|
110
131
|
exports.expectSegment = expectSegment;
|
|
132
|
+
const parseSegment = async ({ segmentId, iterator, length, parserContext, headerReadSoFar, }) => {
|
|
133
|
+
if (length < 0) {
|
|
134
|
+
throw new Error(`Expected length of ${segmentId} to be greater or equal 0`);
|
|
135
|
+
}
|
|
136
|
+
iterator.counter.decrement(headerReadSoFar);
|
|
137
|
+
const offset = iterator.counter.getOffset();
|
|
138
|
+
const ebml = await (0, parse_ebml_1.parseEbml)(iterator, parserContext);
|
|
139
|
+
const remapped = await (0, parse_ebml_1.postprocessEbml)({ offset, ebml, parserContext });
|
|
140
|
+
return remapped;
|
|
141
|
+
};
|
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
import type {
|
|
1
|
+
import type { MatroskaSegment } from './segments';
|
|
2
2
|
import type { AudioSegment, ClusterSegment, CodecIdSegment, ColourSegment, DisplayHeightSegment, DisplayWidthSegment, HeightSegment, MainSegment, MatrixCoefficientsSegment, PrimariesSegment, RangeSegment, TimestampScaleSegment, TrackEntry, TrackTypeSegment, TransferCharacteristicsSegment, VideoSegment, WidthSegment } from './segments/all-segments';
|
|
3
|
-
export declare const getMainSegment: (segments:
|
|
3
|
+
export declare const getMainSegment: (segments: MatroskaSegment[]) => MainSegment | null;
|
|
4
4
|
export declare const getTrackNumber: (track: TrackEntry) => import("./segments/all-segments").UintWithSize | null;
|
|
5
5
|
export declare const getTrackCodec: (track: TrackEntry) => CodecIdSegment | null;
|
|
6
6
|
export declare const getTrackTimestampScale: (track: TrackEntry) => import("./segments/all-segments").FloatWithSize | null;
|
|
@@ -37,9 +37,10 @@ export declare const getArrayBufferIterator: (initialData: Uint8Array, maxBytes:
|
|
|
37
37
|
getEBML: () => number;
|
|
38
38
|
getInt8: () => number;
|
|
39
39
|
getUint16: () => number;
|
|
40
|
+
getUint16Le: () => number;
|
|
40
41
|
getUint24: () => number;
|
|
41
42
|
getInt16: () => number;
|
|
42
|
-
getUint32: (
|
|
43
|
+
getUint32: () => number;
|
|
43
44
|
getUint64: (littleEndian?: boolean) => bigint;
|
|
44
45
|
getFixedPointUnsigned1616Number: () => number;
|
|
45
46
|
getFixedPointSigned1616Number: () => number;
|
|
@@ -56,5 +57,9 @@ export declare const getArrayBufferIterator: (initialData: Uint8Array, maxBytes:
|
|
|
56
57
|
isMp3: () => boolean;
|
|
57
58
|
disallowDiscard: () => void;
|
|
58
59
|
allowDiscard: () => void;
|
|
60
|
+
startBox: (size: number) => {
|
|
61
|
+
discardRest: () => void;
|
|
62
|
+
expectNoMoreBytes: () => void;
|
|
63
|
+
};
|
|
59
64
|
};
|
|
60
65
|
export type BufferIterator = ReturnType<typeof getArrayBufferIterator>;
|
package/dist/buffer-iterator.js
CHANGED
|
@@ -89,6 +89,9 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
|
|
|
89
89
|
const allowDiscard = () => {
|
|
90
90
|
discardAllowed = true;
|
|
91
91
|
};
|
|
92
|
+
const discard = (length) => {
|
|
93
|
+
counter.increment(length);
|
|
94
|
+
};
|
|
92
95
|
const getUint8 = () => {
|
|
93
96
|
const val = view.getUint8(counter.getDiscardedOffset());
|
|
94
97
|
counter.increment(1);
|
|
@@ -142,8 +145,8 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
|
|
|
142
145
|
}
|
|
143
146
|
return lastInt;
|
|
144
147
|
};
|
|
145
|
-
const getUint32 = (
|
|
146
|
-
const val = view.getUint32(counter.getDiscardedOffset()
|
|
148
|
+
const getUint32 = () => {
|
|
149
|
+
const val = view.getUint32(counter.getDiscardedOffset());
|
|
147
150
|
counter.increment(4);
|
|
148
151
|
return val;
|
|
149
152
|
};
|
|
@@ -152,6 +155,18 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
|
|
|
152
155
|
counter.increment(8);
|
|
153
156
|
return val;
|
|
154
157
|
};
|
|
158
|
+
const startBox = (size) => {
|
|
159
|
+
const startOffset = counter.getOffset();
|
|
160
|
+
return {
|
|
161
|
+
discardRest: () => discard(size - (counter.getOffset() - startOffset)),
|
|
162
|
+
expectNoMoreBytes: () => {
|
|
163
|
+
const remaining = size - (counter.getOffset() - startOffset);
|
|
164
|
+
if (remaining !== 0) {
|
|
165
|
+
throw new Error('expected 0 bytes, got ' + remaining);
|
|
166
|
+
}
|
|
167
|
+
},
|
|
168
|
+
};
|
|
169
|
+
};
|
|
155
170
|
const getUint32Le = () => {
|
|
156
171
|
const val = view.getUint32(counter.getDiscardedOffset(), true);
|
|
157
172
|
counter.increment(4);
|
|
@@ -300,9 +315,7 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
|
|
|
300
315
|
leb128,
|
|
301
316
|
removeBytesRead,
|
|
302
317
|
isWebm,
|
|
303
|
-
discard
|
|
304
|
-
counter.increment(length);
|
|
305
|
-
},
|
|
318
|
+
discard,
|
|
306
319
|
getEightByteNumber,
|
|
307
320
|
getFourByteNumber,
|
|
308
321
|
getSlice,
|
|
@@ -414,6 +427,11 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
|
|
|
414
427
|
counter.increment(2);
|
|
415
428
|
return val;
|
|
416
429
|
},
|
|
430
|
+
getUint16Le: () => {
|
|
431
|
+
const val = view.getUint16(counter.getDiscardedOffset(), true);
|
|
432
|
+
counter.increment(2);
|
|
433
|
+
return val;
|
|
434
|
+
},
|
|
417
435
|
getUint24: () => {
|
|
418
436
|
const val1 = view.getUint8(counter.getDiscardedOffset());
|
|
419
437
|
const val2 = view.getUint8(counter.getDiscardedOffset() + 1);
|
|
@@ -472,6 +490,7 @@ const getArrayBufferIterator = (initialData, maxBytes) => {
|
|
|
472
490
|
isMp3,
|
|
473
491
|
disallowDiscard,
|
|
474
492
|
allowDiscard,
|
|
493
|
+
startBox,
|
|
475
494
|
};
|
|
476
495
|
};
|
|
477
496
|
exports.getArrayBufferIterator = getArrayBufferIterator;
|
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
import type { MediaFn, MediaFnGeneratorInput } from '../media-fn';
|
|
2
|
-
export declare const createIsoBaseMedia: ({ writer, onBytesProgress, onMillisecondsProgress, logLevel, }: MediaFnGeneratorInput) => Promise<MediaFn>;
|
|
2
|
+
export declare const createIsoBaseMedia: ({ writer, onBytesProgress, onMillisecondsProgress, logLevel, filename, }: MediaFnGeneratorInput) => Promise<MediaFn>;
|
|
@@ -6,13 +6,13 @@ const log_1 = require("../../log");
|
|
|
6
6
|
const create_ftyp_1 = require("./create-ftyp");
|
|
7
7
|
const mp4_header_1 = require("./mp4-header");
|
|
8
8
|
const primitives_1 = require("./primitives");
|
|
9
|
-
const createIsoBaseMedia = async ({ writer, onBytesProgress, onMillisecondsProgress, logLevel, }) => {
|
|
9
|
+
const createIsoBaseMedia = async ({ writer, onBytesProgress, onMillisecondsProgress, logLevel, filename, }) => {
|
|
10
10
|
const header = (0, create_ftyp_1.createIsoBaseMediaFtyp)({
|
|
11
11
|
compatibleBrands: ['isom', 'iso2', 'avc1', 'mp42'],
|
|
12
12
|
majorBrand: 'isom',
|
|
13
13
|
minorBrand: 512,
|
|
14
14
|
});
|
|
15
|
-
const w = await writer.createContent();
|
|
15
|
+
const w = await writer.createContent({ filename, mimeType: 'video/mp4' });
|
|
16
16
|
await w.write(header);
|
|
17
17
|
let durationInUnits = 0;
|
|
18
18
|
const currentTracks = [];
|
|
@@ -113,9 +113,8 @@ const createIsoBaseMedia = async ({ writer, onBytesProgress, onMillisecondsProgr
|
|
|
113
113
|
};
|
|
114
114
|
const waitForFinishPromises = [];
|
|
115
115
|
return {
|
|
116
|
-
save:
|
|
117
|
-
|
|
118
|
-
return file;
|
|
116
|
+
save: () => {
|
|
117
|
+
return w.save();
|
|
119
118
|
},
|
|
120
119
|
remove: async () => {
|
|
121
120
|
await w.remove();
|
|
@@ -159,10 +158,6 @@ const createIsoBaseMedia = async ({ writer, onBytesProgress, onMillisecondsProgr
|
|
|
159
158
|
log_1.Log.verbose(logLevel, 'All write operations done. Waiting for finish...');
|
|
160
159
|
await w.waitForFinish();
|
|
161
160
|
},
|
|
162
|
-
updateDuration: (duration) => {
|
|
163
|
-
operationProm.current = operationProm.current.then(() => updateDuration(duration));
|
|
164
|
-
return operationProm.current;
|
|
165
|
-
},
|
|
166
161
|
};
|
|
167
162
|
};
|
|
168
163
|
exports.createIsoBaseMedia = createIsoBaseMedia;
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import type { LogLevel } from '../../log';
|
|
1
2
|
import type { AudioOrVideoSample } from '../../webcodec-sample-types';
|
|
2
3
|
import type { Writer } from '../../writers/writer';
|
|
3
4
|
export declare const timestampToClusterTimestamp: (timestamp: number, timescale: number) => number;
|
|
@@ -6,7 +7,12 @@ export declare const canFitInCluster: ({ clusterStartTimestamp, chunk, timescale
|
|
|
6
7
|
chunk: AudioOrVideoSample;
|
|
7
8
|
timescale: number;
|
|
8
9
|
}) => boolean;
|
|
9
|
-
export declare const makeCluster: (
|
|
10
|
+
export declare const makeCluster: ({ writer, clusterStartTimestamp, timescale, logLevel, }: {
|
|
11
|
+
writer: Writer;
|
|
12
|
+
clusterStartTimestamp: number;
|
|
13
|
+
timescale: number;
|
|
14
|
+
logLevel: LogLevel;
|
|
15
|
+
}) => Promise<{
|
|
10
16
|
addSample: (chunk: AudioOrVideoSample, trackNumber: number) => Promise<{
|
|
11
17
|
timecodeRelativeToCluster: number;
|
|
12
18
|
}>;
|
|
@@ -4,6 +4,7 @@ exports.makeCluster = exports.canFitInCluster = exports.timestampToClusterTimest
|
|
|
4
4
|
const ebml_1 = require("../../boxes/webm/ebml");
|
|
5
5
|
const make_header_1 = require("../../boxes/webm/make-header");
|
|
6
6
|
const all_segments_1 = require("../../boxes/webm/segments/all-segments");
|
|
7
|
+
const log_1 = require("../../log");
|
|
7
8
|
const cluster_segment_1 = require("./cluster-segment");
|
|
8
9
|
const maxClusterTimestamp = 2 ** 15;
|
|
9
10
|
const timestampToClusterTimestamp = (timestamp, timescale) => {
|
|
@@ -19,15 +20,16 @@ const canFitInCluster = ({ clusterStartTimestamp, chunk, timescale, }) => {
|
|
|
19
20
|
return timecodeRelativeToCluster <= maxClusterTimestamp;
|
|
20
21
|
};
|
|
21
22
|
exports.canFitInCluster = canFitInCluster;
|
|
22
|
-
const makeCluster = async (
|
|
23
|
+
const makeCluster = async ({ writer, clusterStartTimestamp, timescale, logLevel, }) => {
|
|
24
|
+
log_1.Log.verbose(logLevel, `Making new Matroska cluster with timestamp ${clusterStartTimestamp}`);
|
|
23
25
|
const cluster = (0, cluster_segment_1.createClusterSegment)((0, exports.timestampToClusterTimestamp)(clusterStartTimestamp, timescale));
|
|
24
|
-
const clusterVIntPosition =
|
|
26
|
+
const clusterVIntPosition = writer.getWrittenByteCount() +
|
|
25
27
|
cluster.offsets.offset +
|
|
26
28
|
(0, make_header_1.matroskaToHex)(all_segments_1.matroskaElements.Cluster).byteLength;
|
|
27
29
|
let clusterSize = cluster.bytes.byteLength -
|
|
28
30
|
(0, make_header_1.matroskaToHex)(all_segments_1.matroskaElements.Cluster).byteLength -
|
|
29
31
|
cluster_segment_1.CLUSTER_MIN_VINT_WIDTH;
|
|
30
|
-
await
|
|
32
|
+
await writer.write(cluster.bytes);
|
|
31
33
|
const addSample = async (chunk, trackNumber) => {
|
|
32
34
|
const timecodeRelativeToCluster = (0, exports.timestampToClusterTimestamp)(chunk.timestamp, timescale) -
|
|
33
35
|
(0, exports.timestampToClusterTimestamp)(clusterStartTimestamp, timescale);
|
|
@@ -44,8 +46,8 @@ const makeCluster = async (w, clusterStartTimestamp, timescale) => {
|
|
|
44
46
|
timecodeRelativeToCluster,
|
|
45
47
|
});
|
|
46
48
|
clusterSize += simpleBlock.byteLength;
|
|
47
|
-
await
|
|
48
|
-
await
|
|
49
|
+
await writer.updateDataAt(clusterVIntPosition, (0, ebml_1.getVariableInt)(clusterSize, cluster_segment_1.CLUSTER_MIN_VINT_WIDTH));
|
|
50
|
+
await writer.write(simpleBlock);
|
|
49
51
|
return { timecodeRelativeToCluster };
|
|
50
52
|
};
|
|
51
53
|
const shouldMakeNewCluster = ({ isVideo, chunk, newT, }) => {
|
|
@@ -59,6 +61,7 @@ const makeCluster = async (w, clusterStartTimestamp, timescale) => {
|
|
|
59
61
|
if (!canFit) {
|
|
60
62
|
// We must create a new cluster
|
|
61
63
|
// This is for example if we have an audio-only file
|
|
64
|
+
log_1.Log.verbose(logLevel, `Cannot fit ${chunk.timestamp} in cluster ${clusterStartTimestamp}. Creating new cluster`);
|
|
62
65
|
return true;
|
|
63
66
|
}
|
|
64
67
|
const keyframe = chunk.type === 'key';
|
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
import type { MediaFn, MediaFnGeneratorInput } from '../media-fn';
|
|
2
|
-
export declare const createMatroskaMedia: ({ writer, onBytesProgress, onMillisecondsProgress, }: MediaFnGeneratorInput) => Promise<MediaFn>;
|
|
2
|
+
export declare const createMatroskaMedia: ({ writer, onBytesProgress, onMillisecondsProgress, filename, logLevel, }: MediaFnGeneratorInput) => Promise<MediaFn>;
|
|
@@ -13,10 +13,10 @@ const matroska_seek_1 = require("./matroska-seek");
|
|
|
13
13
|
const matroska_segment_1 = require("./matroska-segment");
|
|
14
14
|
const matroska_trackentry_1 = require("./matroska-trackentry");
|
|
15
15
|
const timescale = 1000000;
|
|
16
|
-
const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProgress, }) => {
|
|
16
|
+
const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProgress, filename, logLevel, }) => {
|
|
17
17
|
var _a, _b, _c, _d, _e, _f, _g;
|
|
18
18
|
const header = (0, matroska_header_1.makeMatroskaHeader)();
|
|
19
|
-
const w = await writer.createContent();
|
|
19
|
+
const w = await writer.createContent({ filename, mimeType: 'video/webm' });
|
|
20
20
|
await w.write(header.bytes);
|
|
21
21
|
const matroskaInfo = (0, matroska_info_1.makeMatroskaInfo)({
|
|
22
22
|
timescale,
|
|
@@ -69,14 +69,27 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
|
|
|
69
69
|
};
|
|
70
70
|
await w.write(matroskaSegment.bytes);
|
|
71
71
|
const clusterOffset = w.getWrittenByteCount();
|
|
72
|
-
let currentCluster = await (0, cluster_1.makeCluster)(
|
|
72
|
+
let currentCluster = await (0, cluster_1.makeCluster)({
|
|
73
|
+
writer: w,
|
|
74
|
+
clusterStartTimestamp: 0,
|
|
75
|
+
timescale,
|
|
76
|
+
logLevel,
|
|
77
|
+
});
|
|
73
78
|
seeks.push({
|
|
74
79
|
hexString: all_segments_1.matroskaElements.Cluster,
|
|
75
80
|
byte: clusterOffset - seekHeadOffset,
|
|
76
81
|
});
|
|
77
82
|
const trackNumberProgresses = {};
|
|
78
|
-
const getClusterOrMakeNew = async ({ chunk, isVideo, }) => {
|
|
79
|
-
const
|
|
83
|
+
const getClusterOrMakeNew = async ({ chunk, isVideo, trackNumber, }) => {
|
|
84
|
+
const trackProgressValues = Object.values(trackNumberProgresses);
|
|
85
|
+
const smallestProgress = trackProgressValues.length === 0 ? 0 : Math.min(...trackProgressValues);
|
|
86
|
+
// In Safari, samples can arrive out of order, e.g public/bigbuckbunny.mp4
|
|
87
|
+
// Therefore, only updating track number progress if it is a keyframe
|
|
88
|
+
// to allow for timestamps to be lower than the previous one
|
|
89
|
+
// Also doing this AFTER smallestProgress is calculated
|
|
90
|
+
if (chunk.type === 'key') {
|
|
91
|
+
trackNumberProgresses[trackNumber] = chunk.timestamp;
|
|
92
|
+
}
|
|
80
93
|
if (!currentCluster.shouldMakeNewCluster({
|
|
81
94
|
newT: smallestProgress,
|
|
82
95
|
isVideo,
|
|
@@ -84,7 +97,12 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
|
|
|
84
97
|
})) {
|
|
85
98
|
return { cluster: currentCluster, isNew: false, smallestProgress };
|
|
86
99
|
}
|
|
87
|
-
currentCluster = await (0, cluster_1.makeCluster)(
|
|
100
|
+
currentCluster = await (0, cluster_1.makeCluster)({
|
|
101
|
+
writer: w,
|
|
102
|
+
clusterStartTimestamp: smallestProgress,
|
|
103
|
+
timescale,
|
|
104
|
+
logLevel,
|
|
105
|
+
});
|
|
88
106
|
return { cluster: currentCluster, isNew: true, smallestProgress };
|
|
89
107
|
};
|
|
90
108
|
const updateDuration = async (newDuration) => {
|
|
@@ -94,10 +112,10 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
|
|
|
94
112
|
};
|
|
95
113
|
const addSample = async ({ chunk, trackNumber, isVideo, }) => {
|
|
96
114
|
var _a;
|
|
97
|
-
trackNumberProgresses[trackNumber] = chunk.timestamp;
|
|
98
115
|
const { cluster, isNew, smallestProgress } = await getClusterOrMakeNew({
|
|
99
116
|
chunk,
|
|
100
117
|
isVideo,
|
|
118
|
+
trackNumber,
|
|
101
119
|
});
|
|
102
120
|
const newDuration = Math.round((chunk.timestamp + ((_a = chunk.duration) !== null && _a !== void 0 ? _a : 0)) / 1000);
|
|
103
121
|
await updateDuration(newDuration);
|
|
@@ -132,9 +150,8 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
|
|
|
132
150
|
}
|
|
133
151
|
});
|
|
134
152
|
},
|
|
135
|
-
save:
|
|
136
|
-
|
|
137
|
-
return file;
|
|
153
|
+
save: () => {
|
|
154
|
+
return w.save();
|
|
138
155
|
},
|
|
139
156
|
remove: async () => {
|
|
140
157
|
await w.remove();
|
|
@@ -143,10 +160,6 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
|
|
|
143
160
|
operationProm.current = operationProm.current.then(() => addSample({ chunk, trackNumber, isVideo }));
|
|
144
161
|
return operationProm.current;
|
|
145
162
|
},
|
|
146
|
-
updateDuration: (duration) => {
|
|
147
|
-
operationProm.current = operationProm.current.then(() => updateDuration(duration));
|
|
148
|
-
return operationProm.current;
|
|
149
|
-
},
|
|
150
163
|
addTrack: (track) => {
|
|
151
164
|
const trackNumber = currentTracks.length + 1;
|
|
152
165
|
operationProm.current = operationProm.current.then(() => addTrack({ ...track, trackNumber }));
|
|
@@ -12,7 +12,6 @@ export type MediaFn = {
|
|
|
12
12
|
timescale: number;
|
|
13
13
|
codecPrivate: Uint8Array | null;
|
|
14
14
|
}) => Promise<void>;
|
|
15
|
-
updateDuration: (duration: number) => Promise<void>;
|
|
16
15
|
addTrack: (track: Omit<MakeTrackAudio, 'trackNumber'> | Omit<MakeTrackVideo, 'trackNumber'>) => Promise<{
|
|
17
16
|
trackNumber: number;
|
|
18
17
|
}>;
|
|
@@ -28,4 +27,5 @@ export type MediaFnGeneratorInput = {
|
|
|
28
27
|
onBytesProgress: (totalBytes: number) => void;
|
|
29
28
|
onMillisecondsProgress: (totalMilliseconds: number) => void;
|
|
30
29
|
logLevel: LogLevel;
|
|
30
|
+
filename: string;
|
|
31
31
|
};
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createMp3 = void 0;
|
|
4
|
+
const log_1 = require("../../log");
|
|
5
|
+
const createMp3 = async ({ filename, logLevel, onBytesProgress, onMillisecondsProgress, writer, }) => {
|
|
6
|
+
const w = await writer.createContent({ filename, mimeType: 'audio/mpeg' });
|
|
7
|
+
const operationProm = { current: Promise.resolve() };
|
|
8
|
+
const addSample = async (chunk) => {
|
|
9
|
+
var _a;
|
|
10
|
+
await w.write(chunk.data);
|
|
11
|
+
onMillisecondsProgress(chunk.timestamp + ((_a = chunk.duration) !== null && _a !== void 0 ? _a : 0));
|
|
12
|
+
onBytesProgress(w.getWrittenByteCount());
|
|
13
|
+
};
|
|
14
|
+
const waitForFinishPromises = [];
|
|
15
|
+
return {
|
|
16
|
+
save: () => {
|
|
17
|
+
return w.save();
|
|
18
|
+
},
|
|
19
|
+
remove: () => {
|
|
20
|
+
return w.remove();
|
|
21
|
+
},
|
|
22
|
+
addSample: ({ chunk, trackNumber }) => {
|
|
23
|
+
if (trackNumber !== 1) {
|
|
24
|
+
throw new Error('Only one track supported for WAV');
|
|
25
|
+
}
|
|
26
|
+
operationProm.current = operationProm.current.then(() => addSample(chunk));
|
|
27
|
+
return operationProm.current;
|
|
28
|
+
},
|
|
29
|
+
updateTrackSampleRate: () => {
|
|
30
|
+
throw new Error('updateTrackSampleRate() not implemented for WAV encoder');
|
|
31
|
+
},
|
|
32
|
+
addWaitForFinishPromise(promise) {
|
|
33
|
+
waitForFinishPromises.push(promise);
|
|
34
|
+
},
|
|
35
|
+
async waitForFinish() {
|
|
36
|
+
log_1.Log.verbose(logLevel, 'All write operations queued. Waiting for finish...');
|
|
37
|
+
await Promise.all(waitForFinishPromises.map((p) => p()));
|
|
38
|
+
await operationProm.current;
|
|
39
|
+
await w.waitForFinish();
|
|
40
|
+
},
|
|
41
|
+
addTrack: (track) => {
|
|
42
|
+
if (track.type !== 'audio') {
|
|
43
|
+
throw new Error('Only audio tracks supported for WAV');
|
|
44
|
+
}
|
|
45
|
+
return Promise.resolve({ trackNumber: 1 });
|
|
46
|
+
},
|
|
47
|
+
};
|
|
48
|
+
};
|
|
49
|
+
exports.createMp3 = createMp3;
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createWav = void 0;
|
|
4
|
+
const log_1 = require("../../log");
|
|
5
|
+
const numberTo32BiIntLittleEndian = (num) => {
|
|
6
|
+
return new Uint8Array([
|
|
7
|
+
num & 0xff,
|
|
8
|
+
(num >> 8) & 0xff,
|
|
9
|
+
(num >> 16) & 0xff,
|
|
10
|
+
(num >> 24) & 0xff,
|
|
11
|
+
]);
|
|
12
|
+
};
|
|
13
|
+
const numberTo16BitLittleEndian = (num) => {
|
|
14
|
+
return new Uint8Array([num & 0xff, (num >> 8) & 0xff]);
|
|
15
|
+
};
|
|
16
|
+
const BIT_DEPTH = 16;
|
|
17
|
+
const BYTES_PER_SAMPLE = BIT_DEPTH / 8;
|
|
18
|
+
/**
 * Creates a WAV (RIFF/WAVE, 16-bit PCM) media writer.
 *
 * Writes the 44-byte header up front with placeholder values and remembers
 * the byte offset of every field that depends on the track parameters or the
 * final data size, so those fields can be patched later via updateDataAt().
 *
 * @param {object} options
 * @param {string} options.filename - Name of the output file.
 * @param {string} options.logLevel - Verbosity passed to Log.verbose().
 * @param {(bytes: number) => void} options.onBytesProgress - Called with total bytes written after each sample.
 * @param {(ms: number) => void} options.onMillisecondsProgress - Called with the timestamp (ms) reached after each sample.
 * @param {object} options.writer - Writer factory providing createContent().
 * @returns {Promise<object>} Media-writer interface (save, remove, addSample, addTrack, waitForFinish, ...).
 */
const createWav = async ({ filename, logLevel, onBytesProgress, onMillisecondsProgress, writer, }) => {
    const w = await writer.createContent({ filename, mimeType: 'audio/wav' });
    await w.write(new Uint8Array([0x52, 0x49, 0x46, 0x46])); // "RIFF"
    const sizePosition = w.getWrittenByteCount();
    await w.write(new Uint8Array([0x00, 0x00, 0x00, 0x00])); // RIFF chunk size, patched in updateSize()
    await w.write(new Uint8Array([0x57, 0x41, 0x56, 0x45])); // "WAVE"
    await w.write(new Uint8Array([0x66, 0x6d, 0x74, 0x20])); // "fmt "
    await w.write(new Uint8Array([0x10, 0x00, 0x00, 0x00])); // fmt chunk size = 16
    await w.write(new Uint8Array([0x01, 0x00])); // Audio format (PCM) = 1, set 3 if float32 would be true
    const channelNumPosition = w.getWrittenByteCount();
    await w.write(new Uint8Array([0x01, 0x00])); // Number of channels = 1, patched in addTrack()
    const sampleRatePosition = w.getWrittenByteCount();
    await w.write(new Uint8Array([0x00, 0x00, 0x00, 0x00])); // Sample rate, patched in addTrack()
    const byteRatePosition = w.getWrittenByteCount();
    await w.write(new Uint8Array([0x00, 0x00, 0x00, 0x00])); // Byte rate, patched in addTrack()
    const blockAlignPosition = w.getWrittenByteCount();
    await w.write(new Uint8Array([0x00, 0x00])); // Block align, patched in addTrack()
    await w.write(numberTo16BitLittleEndian(BIT_DEPTH)); // Bits per sample
    await w.write(new Uint8Array([0x64, 0x61, 0x74, 0x61])); // "data"
    const dataSizePosition = w.getWrittenByteCount();
    await w.write(new Uint8Array([0x00, 0x00, 0x00, 0x00])); // data chunk size, patched in updateSize()
    // Serializes write operations so samples land in the file in the order
    // addSample() was called.
    const operationProm = { current: Promise.resolve() };
    // Patch the RIFF chunk size and the data chunk size once all samples are written.
    const updateSize = async () => {
        const size = w.getWrittenByteCount() - sizePosition - 4;
        await w.updateDataAt(sizePosition, numberTo32BiIntLittleEndian(size));
        const dataSize = w.getWrittenByteCount() - dataSizePosition - 4;
        await w.updateDataAt(dataSizePosition, numberTo32BiIntLittleEndian(dataSize));
    };
    const updateChannelNum = async (numberOfChannels) => {
        await w.updateDataAt(channelNumPosition, numberTo16BitLittleEndian(numberOfChannels));
    };
    const updateSampleRate = async (sampleRate) => {
        await w.updateDataAt(sampleRatePosition, numberTo32BiIntLittleEndian(sampleRate));
    };
    const updateByteRate = async ({ sampleRate, numberOfChannels, }) => {
        // byteRate = sampleRate * numberOfChannels * bytesPerSample.
        // Fix: the previous code added BYTES_PER_SAMPLE instead of multiplying,
        // writing an incorrect byte rate into the header.
        await w.updateDataAt(byteRatePosition, numberTo32BiIntLittleEndian(sampleRate * numberOfChannels * BYTES_PER_SAMPLE));
    };
    const updateBlockAlign = async (numberOfChannels) => {
        // blockAlign = bytes per sample frame across all channels.
        // (Removed a redundant Uint8Array copy of an already-Uint8Array value.)
        await w.updateDataAt(blockAlignPosition, numberTo16BitLittleEndian(numberOfChannels * BYTES_PER_SAMPLE));
    };
    // Append raw PCM bytes and report byte/time progress to the caller.
    const addSample = async (chunk) => {
        log_1.Log.verbose(logLevel, 'Adding sample', chunk);
        await w.write(chunk.data);
        const duration = chunk.duration === null || chunk.duration === undefined ? 0 : chunk.duration;
        onMillisecondsProgress((chunk.timestamp + duration) / 1000);
        onBytesProgress(w.getWrittenByteCount());
    };
    const waitForFinishPromises = [];
    return {
        save: () => {
            return w.save();
        },
        remove: () => {
            return w.remove();
        },
        addSample: ({ chunk, trackNumber }) => {
            if (trackNumber !== 1) {
                throw new Error('Only one track supported for WAV');
            }
            // Chain onto the previous operation to preserve write order.
            operationProm.current = operationProm.current.then(() => addSample(chunk));
            return operationProm.current;
        },
        updateTrackSampleRate: () => {
            throw new Error('updateTrackSampleRate() not implemented for WAV encoder');
        },
        addWaitForFinishPromise: (promise) => {
            waitForFinishPromises.push(promise);
        },
        async waitForFinish() {
            log_1.Log.verbose(logLevel, 'All write operations queued. Waiting for finish...');
            await Promise.all(waitForFinishPromises.map((p) => p()));
            await operationProm.current;
            // All data written — now the real chunk sizes can be patched in.
            await updateSize();
            await w.waitForFinish();
        },
        addTrack: async (track) => {
            if (track.type !== 'audio') {
                throw new Error('Only audio tracks supported for WAV');
            }
            // Patch the fmt-chunk fields now that the real track parameters are known.
            await updateChannelNum(track.numberOfChannels);
            await updateSampleRate(track.sampleRate);
            await updateByteRate({
                sampleRate: track.sampleRate,
                numberOfChannels: track.numberOfChannels,
            });
            await updateBlockAlign(track.numberOfChannels);
            return { trackNumber: 1 };
        },
    };
};
|
|
108
|
+
exports.createWav = createWav;
|