@remotion/webcodecs 4.0.210
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +49 -0
- package/README.md +18 -0
- package/dist/audio-decoder-config.d.ts +2 -0
- package/dist/audio-decoder-config.js +13 -0
- package/dist/audio-decoder.d.ts +15 -0
- package/dist/audio-decoder.js +85 -0
- package/dist/audio-encoder-config.d.ts +2 -0
- package/dist/audio-encoder-config.js +13 -0
- package/dist/audio-encoder.d.ts +16 -0
- package/dist/audio-encoder.js +90 -0
- package/dist/codec-id.d.ts +2 -0
- package/dist/codec-id.js +2 -0
- package/dist/convert-media.d.ts +26 -0
- package/dist/convert-media.js +86 -0
- package/dist/create-audio-decoder.d.ts +7 -0
- package/dist/create-audio-decoder.js +18 -0
- package/dist/create-decoder.d.ts +6 -0
- package/dist/create-decoder.js +32 -0
- package/dist/create-encoder.d.ts +9 -0
- package/dist/create-encoder.js +46 -0
- package/dist/create-video-decoder.d.ts +6 -0
- package/dist/create-video-decoder.js +18 -0
- package/dist/decoder.d.ts +7 -0
- package/dist/decoder.js +44 -0
- package/dist/encoder.d.ts +7 -0
- package/dist/encoder.js +43 -0
- package/dist/error-cause.d.ts +8 -0
- package/dist/error-cause.js +3 -0
- package/dist/esm/index.mjs +793 -0
- package/dist/get-config.d.ts +1 -0
- package/dist/get-config.js +21 -0
- package/dist/get-description.d.ts +6 -0
- package/dist/get-description.js +20 -0
- package/dist/get-samples.d.ts +6 -0
- package/dist/get-samples.js +24 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +13 -0
- package/dist/load-mp4-file.d.ts +8 -0
- package/dist/load-mp4-file.js +37 -0
- package/dist/on-audio-track.d.ts +15 -0
- package/dist/on-audio-track.js +105 -0
- package/dist/on-video-track.d.ts +15 -0
- package/dist/on-video-track.js +101 -0
- package/dist/reencode-video.d.ts +1 -0
- package/dist/reencode-video.js +68 -0
- package/dist/resolve-audio-action.d.ts +16 -0
- package/dist/resolve-audio-action.js +30 -0
- package/dist/resolve-video-action.d.ts +15 -0
- package/dist/resolve-video-action.js +30 -0
- package/dist/video-decoder-config.d.ts +1 -0
- package/dist/video-decoder-config.js +24 -0
- package/dist/video-decoder.d.ts +14 -0
- package/dist/video-decoder.js +87 -0
- package/dist/video-encoder-config.d.ts +1 -0
- package/dist/video-encoder-config.js +24 -0
- package/dist/video-encoder.d.ts +13 -0
- package/dist/video-encoder.js +90 -0
- package/dist/video-parser.d.ts +1 -0
- package/dist/video-parser.js +51 -0
- package/dist/wait-for-dequeue.d.ts +5 -0
- package/dist/wait-for-dequeue.js +51 -0
- package/dist/with-resolvers.d.ts +5 -0
- package/dist/with-resolvers.js +13 -0
- package/package.json +37 -0
|
@@ -0,0 +1,793 @@
|
|
|
1
|
+
// src/audio-decoder.ts

/**
 * Wraps a WebCodecs `AudioDecoder` with ordered delivery and backpressure.
 *
 * - `onFrame` is invoked for every decoded frame, strictly in output order
 *   (calls are chained on an internal promise queue).
 * - `processSample` applies backpressure: it waits until fewer than ~10
 *   items are in flight before submitting the next chunk.
 * - Aborting `signal` closes the underlying decoder.
 *
 * Throws synchronously if `signal` is already aborted at creation time.
 */
var createAudioDecoder = ({
  onFrame,
  onError,
  signal,
  config
}) => {
  if (signal.aborted) {
    throw new Error("Not creating audio decoder, already aborted");
  }
  // Chains onFrame() callbacks so decoded frames are handled one at a time.
  let outputQueue = Promise.resolve();
  let outputQueueSize = 0;
  let dequeueResolver = () => {
  };
  const audioDecoder = new AudioDecoder({
    output(inputFrame) {
      outputQueueSize++;
      outputQueue = outputQueue.then(() => onFrame(inputFrame)).then(() => {
        // Decrement before waking waiters so getQueueSize() is already up to
        // date (matches the ordering used by the video decoder and encoders).
        outputQueueSize--;
        dequeueResolver();
        return Promise.resolve();
      });
    },
    error(error) {
      onError(error);
    }
  });
  const close = () => {
    signal.removeEventListener("abort", onAbort);
    if (audioDecoder.state === "closed") {
      return;
    }
    audioDecoder.close();
  };
  const onAbort = () => {
    close();
  };
  signal.addEventListener("abort", onAbort);
  // Items in flight = chunks queued inside the decoder + frames still
  // awaiting their onFrame() callback.
  const getQueueSize = () => {
    return audioDecoder.decodeQueueSize + outputQueueSize;
  };
  audioDecoder.configure(config);
  const waitForDequeue = async () => {
    await new Promise((r) => {
      // Resolved either when an onFrame() callback completes or when the
      // decoder itself dequeues a chunk.
      dequeueResolver = r;
      audioDecoder.addEventListener("dequeue", () => r(), {
        once: true
      });
    });
  };
  const waitForFinish = async () => {
    while (getQueueSize() > 0) {
      await waitForDequeue();
    }
  };
  const processSample = async (audioSample) => {
    if (audioDecoder.state === "closed") {
      return;
    }
    // Backpressure: don't let more than ~10 items pile up.
    while (getQueueSize() > 10) {
      await waitForDequeue();
    }
    // Fix: the decoder may have been closed (e.g. via abort) while waiting
    // above; decoding on a closed decoder throws. Mirrors the re-check done
    // by the video decoder and both encoders.
    if (audioDecoder.state === "closed") {
      return;
    }
    const chunk = new EncodedAudioChunk(audioSample);
    audioDecoder.decode(chunk);
  };
  // Serializes processSample() calls so samples are decoded in arrival order.
  let queue = Promise.resolve();
  return {
    processSample: (sample) => {
      queue = queue.then(() => processSample(sample));
      return queue;
    },
    waitForFinish: async () => {
      await audioDecoder.flush();
      await waitForFinish();
      await outputQueue;
    },
    close,
    getQueueSize,
    flush: async () => {
      await audioDecoder.flush();
    }
  };
};
|
|
84
|
+
// src/audio-encoder.ts

/**
 * Wraps a WebCodecs `AudioEncoder` with ordered chunk delivery and
 * backpressure. Only `codec: "opus"` is supported.
 *
 * Throws synchronously if `signal` is already aborted or `codec` is not
 * "opus".
 */
var createAudioEncoder = ({
  onChunk,
  onError,
  codec,
  signal,
  config: audioEncoderConfig
}) => {
  if (signal.aborted) {
    throw new Error("Not creating audio encoder, already aborted");
  }
  // Fix: validate the codec before constructing the encoder and registering
  // the abort listener — previously an unsupported codec threw after both,
  // leaking a dangling "abort" listener and an unconfigured AudioEncoder.
  if (codec !== "opus") {
    throw new Error('Only `codec: "opus"` is supported currently');
  }
  // Chains onChunk() callbacks so encoded chunks are handled one at a time.
  let prom = Promise.resolve();
  let outputQueue = 0;
  let dequeueResolver = () => {
  };
  const encoder = new AudioEncoder({
    output: (chunk) => {
      outputQueue++;
      prom = prom.then(() => onChunk(chunk)).then(() => {
        outputQueue--;
        dequeueResolver();
        return Promise.resolve();
      });
    },
    error(error) {
      onError(error);
    }
  });
  const close = () => {
    signal.removeEventListener("abort", onAbort);
    if (encoder.state === "closed") {
      return;
    }
    encoder.close();
  };
  const onAbort = () => {
    close();
  };
  signal.addEventListener("abort", onAbort);
  // Items in flight = frames queued inside the encoder + chunks still
  // awaiting their onChunk() callback.
  const getQueueSize = () => {
    return encoder.encodeQueueSize + outputQueue;
  };
  encoder.configure(audioEncoderConfig);
  const waitForDequeue = async () => {
    await new Promise((r) => {
      dequeueResolver = r;
      encoder.addEventListener("dequeue", () => r(), {
        once: true
      });
    });
  };
  const waitForFinish = async () => {
    while (getQueueSize() > 0) {
      await waitForDequeue();
    }
  };
  const encodeFrame = async (audioData) => {
    if (encoder.state === "closed") {
      return;
    }
    // Backpressure: don't let more than ~10 items pile up.
    while (getQueueSize() > 10) {
      await waitForDequeue();
    }
    // Re-check: the encoder may have been closed while waiting above.
    if (encoder.state === "closed") {
      return;
    }
    encoder.encode(audioData);
  };
  // Serializes encodeFrame() calls so frames are encoded in arrival order.
  let queue = Promise.resolve();
  return {
    encodeFrame: (audioData) => {
      queue = queue.then(() => encodeFrame(audioData));
      return queue;
    },
    waitForFinish: async () => {
      await encoder.flush();
      await waitForFinish();
      await prom;
    },
    close,
    getQueueSize,
    flush: async () => {
      await encoder.flush();
    }
  };
};
|
|
173
|
+
// src/convert-media.ts
|
|
174
|
+
import {
|
|
175
|
+
MediaParserInternals,
|
|
176
|
+
parseMedia
|
|
177
|
+
} from "@remotion/media-parser";
|
|
178
|
+
import {bufferWriter} from "@remotion/media-parser/buffer";
|
|
179
|
+
import {canUseWebFsWriter, webFsWriter} from "@remotion/media-parser/web-fs";
|
|
180
|
+
|
|
181
|
+
// src/error-cause.ts
// Alias for the built-in Error constructor — presumably a fallback emitted
// by the bundler for an `error-cause` ponyfill (TODO confirm against the
// original source); used with the `{cause}` option throughout this file.
const error_cause_default = Error;
|
|
183
|
+
|
|
184
|
+
// src/audio-decoder-config.ts
// Resolves to `config` when this environment's AudioDecoder supports it, or
// to null when AudioDecoder is unavailable or rejects the configuration.
var getAudioDecoderConfig = async (config) => {
  if (typeof AudioDecoder === "undefined") {
    return null;
  }
  const {supported} = await AudioDecoder.isConfigSupported(config);
  return supported ? config : null;
};
|
|
194
|
+
|
|
195
|
+
// src/audio-encoder-config.ts
// Resolves to `config` when this environment's AudioEncoder supports it, or
// to null when AudioEncoder is unavailable or rejects the configuration.
var getAudioEncoderConfig = async (config) => {
  if (typeof AudioEncoder === "undefined") {
    return null;
  }
  const {supported} = await AudioEncoder.isConfigSupported(config);
  return supported ? config : null;
};
|
|
205
|
+
|
|
206
|
+
// src/resolve-audio-action.ts
// Whether an audio track can be copied sample-by-sample without re-encoding:
// only possible when input and output codec already match.
var canCopyAudioTrack = (inputCodec, outputCodec) => {
  switch (outputCodec) {
    case "opus":
      return inputCodec === "opus";
    default:
      throw new Error(`Unhandled codec: ${outputCodec}`);
  }
};
|
|
213
|
+
// Default audio-track policy: prefer a lossless copy, fall back to
// re-encoding, and drop the track when neither is possible.
var defaultResolveAudioAction = ({canReencode, canCopy}) => {
  return canCopy ? "copy" : canReencode ? "reencode" : "drop";
};
|
|
225
|
+
// Asks the (user-provided or default) resolver what to do with an audio
// track, telling it whether copying and/or re-encoding are possible.
var resolveAudioAction = async ({
  audioDecoderConfig,
  audioEncoderConfig,
  track,
  audioCodec,
  resolverFunction
}) => {
  // Re-encoding needs both a working decoder and a working encoder config.
  const canReencode = Boolean(audioDecoderConfig) && Boolean(audioEncoderConfig);
  const canCopy = canCopyAudioTrack(track.codecWithoutConfig, audioCodec);
  return resolverFunction({canReencode, canCopy});
};
|
|
240
|
+
|
|
241
|
+
// src/on-audio-track.ts

/**
 * Creates the parseMedia() audio-track callback. Per track it decides
 * (via the resolver) whether to copy samples as-is, re-encode them through
 * a decoder→encoder pipeline, or drop the track, and returns the per-sample
 * handler — or null when the track is dropped or setup failed (in which
 * case the conversion has been aborted).
 */
var makeAudioTrackHandler = ({
  state,
  audioCodec,
  convertMediaState,
  controller,
  abortConversion,
  onMediaStateUpdate,
  onAudioTrack,
  bitrate
}) => async (track) => {
  const audioEncoderConfig = await getAudioEncoderConfig({
    codec: audioCodec,
    numberOfChannels: track.numberOfChannels,
    sampleRate: track.sampleRate,
    bitrate
  });
  const audioDecoderConfig = await getAudioDecoderConfig({
    codec: track.codec,
    numberOfChannels: track.numberOfChannels,
    sampleRate: track.sampleRate,
    description: track.description
  });
  const audioOperation = await resolveAudioAction({
    audioDecoderConfig,
    audioEncoderConfig,
    audioCodec,
    track,
    resolverFunction: onAudioTrack
  });
  if (audioOperation === "drop") {
    return null;
  }
  if (audioOperation === "copy") {
    // Copy path: remux samples directly, no decode/encode involved.
    const addedTrack = await state.addTrack({
      type: "audio",
      codec: audioCodec,
      numberOfChannels: track.numberOfChannels,
      sampleRate: track.sampleRate,
      codecPrivate: track.codecPrivate
    });
    return async (audioSample) => {
      await state.addSample(new EncodedAudioChunk(audioSample), addedTrack.trackNumber);
      convertMediaState.encodedAudioFrames++;
      onMediaStateUpdate?.({...convertMediaState});
    };
  }
  if (!audioEncoderConfig) {
    abortConversion(new error_cause_default(`Could not configure audio encoder of track ${track.trackId}`));
    return null;
  }
  if (!audioDecoderConfig) {
    abortConversion(new error_cause_default(`Could not configure audio decoder of track ${track.trackId}`));
    return null;
  }
  const {trackNumber} = await state.addTrack({
    type: "audio",
    codec: audioCodec,
    numberOfChannels: track.numberOfChannels,
    sampleRate: track.sampleRate,
    codecPrivate: null
  });
  const audioEncoder = createAudioEncoder({
    onChunk: async (chunk) => {
      await state.addSample(chunk, trackNumber);
      convertMediaState.encodedAudioFrames++;
      onMediaStateUpdate?.({...convertMediaState});
    },
    onError: (err) => {
      abortConversion(new error_cause_default(`Audio encoder of ${track.trackId} failed (see .cause of this error)`, {
        cause: err
      }));
    },
    codec: audioCodec,
    signal: controller.signal,
    config: audioEncoderConfig
  });
  const audioDecoder = createAudioDecoder({
    onFrame: async (frame) => {
      await audioEncoder.encodeFrame(frame);
      convertMediaState.decodedAudioFrames++;
      // Fix: pass a snapshot rather than the live mutable state object —
      // every other call site spreads, this one previously did not.
      onMediaStateUpdate?.({...convertMediaState});
      frame.close();
    },
    onError(error) {
      abortConversion(new error_cause_default(`Audio decoder of track ${track.trackId} failed (see .cause of this error)`, {
        cause: error
      }));
    },
    signal: controller.signal,
    config: audioDecoderConfig
  });
  // Drain decoder then encoder before the container is finalized.
  state.addWaitForFinishPromise(async () => {
    await audioDecoder.waitForFinish();
    await audioEncoder.waitForFinish();
    audioDecoder.close();
    audioEncoder.close();
  });
  return async (audioSample) => {
    await audioDecoder.processSample(audioSample);
  };
};
|
|
343
|
+
|
|
344
|
+
// src/resolve-video-action.ts
// Whether a video track can be copied sample-by-sample without re-encoding:
// only possible when input and output codec already match.
var canCopyVideoTrack = (inputCodec, outputCodec) => {
  switch (outputCodec) {
    case "vp8":
      return inputCodec === "vp8";
    default:
      throw new Error(`Unhandled codec: ${outputCodec}`);
  }
};
|
|
351
|
+
// Default video-track policy: prefer a lossless copy, fall back to
// re-encoding, and drop the track when neither is possible.
var defaultResolveVideoAction = ({canReencode, canCopy}) => {
  return canCopy ? "copy" : canReencode ? "reencode" : "drop";
};
|
|
363
|
+
// Asks the (user-provided or default) resolver what to do with a video
// track, telling it whether copying and/or re-encoding are possible.
var resolveVideoAction = async ({
  videoDecoderConfig,
  videoEncoderConfig,
  track,
  videoCodec,
  resolverFunction
}) => {
  // Re-encoding needs both a working decoder and a working encoder config.
  const canReencode = Boolean(videoDecoderConfig) && Boolean(videoEncoderConfig);
  const canCopy = canCopyVideoTrack(track.codecWithoutConfig, videoCodec);
  return resolverFunction({canReencode, canCopy});
};
|
|
378
|
+
|
|
379
|
+
// src/video-decoder.ts

/**
 * Wraps a WebCodecs `VideoDecoder` with ordered delivery and backpressure.
 *
 * - `onFrame` is invoked for every decoded frame, strictly in output order.
 * - `processSample` applies backpressure (waits while >10 items in flight)
 *   and flushes the decoder before each key frame.
 * - Aborting `signal` closes the underlying decoder.
 *
 * Throws synchronously if `signal` is already aborted at creation time.
 */
var createVideoDecoder = ({
  onFrame,
  onError,
  signal,
  config
}) => {
  // Fix: reject early when the signal is already aborted — createAudioDecoder,
  // createAudioEncoder and createVideoEncoder all guard this; this factory
  // previously configured a decoder that could never be used.
  if (signal.aborted) {
    throw new Error("Not creating video decoder, already aborted");
  }
  // Chains onFrame() callbacks so decoded frames are handled one at a time.
  let outputQueue = Promise.resolve();
  let outputQueueSize = 0;
  let dequeueResolver = () => {
  };
  const videoDecoder = new VideoDecoder({
    output(inputFrame) {
      outputQueueSize++;
      outputQueue = outputQueue.then(() => onFrame(inputFrame)).then(() => {
        outputQueueSize--;
        dequeueResolver();
        return Promise.resolve();
      });
    },
    error(error) {
      onError(error);
    }
  });
  const close = () => {
    signal.removeEventListener("abort", onAbort);
    if (videoDecoder.state === "closed") {
      return;
    }
    videoDecoder.close();
  };
  const onAbort = () => {
    close();
  };
  signal.addEventListener("abort", onAbort);
  // Items in flight = chunks queued inside the decoder + frames still
  // awaiting their onFrame() callback.
  const getQueueSize = () => {
    return videoDecoder.decodeQueueSize + outputQueueSize;
  };
  videoDecoder.configure(config);
  const waitForDequeue = async () => {
    await new Promise((r) => {
      dequeueResolver = r;
      videoDecoder.addEventListener("dequeue", () => r(), {
        once: true
      });
    });
  };
  const waitForFinish = async () => {
    while (getQueueSize() > 0) {
      await waitForDequeue();
    }
  };
  const processSample = async (sample) => {
    if (videoDecoder.state === "closed") {
      return;
    }
    // Backpressure: don't let more than ~10 items pile up.
    while (getQueueSize() > 10) {
      await waitForDequeue();
    }
    // Re-check: the decoder may have been closed while waiting above.
    if (videoDecoder.state === "closed") {
      return;
    }
    if (sample.type === "key") {
      // Drain all pending output before starting a new group of pictures.
      await videoDecoder.flush();
    }
    videoDecoder.decode(new EncodedVideoChunk(sample));
  };
  // Serializes processSample() calls so samples are decoded in arrival order.
  let inputQueue = Promise.resolve();
  return {
    processSample: (sample) => {
      inputQueue = inputQueue.then(() => processSample(sample));
      return inputQueue;
    },
    waitForFinish: async () => {
      await videoDecoder.flush();
      await waitForFinish();
      await outputQueue;
      await inputQueue;
    },
    close,
    getQueueSize,
    flush: async () => {
      await videoDecoder.flush();
    }
  };
};
|
|
465
|
+
|
|
466
|
+
// src/video-decoder-config.ts
// Resolves to a decoder config with a `hardwareAcceleration` preference
// attached — hardware if supported, otherwise software — or to null when
// VideoDecoder is unavailable or rejects both variants.
var getVideoDecoderConfigWithHardwareAcceleration = async (config) => {
  if (typeof VideoDecoder === "undefined") {
    return null;
  }
  for (const hardwareAcceleration of ["prefer-hardware", "prefer-software"]) {
    const candidate = {...config, hardwareAcceleration};
    const {supported} = await VideoDecoder.isConfigSupported(candidate);
    if (supported) {
      return candidate;
    }
  }
  return null;
};
|
|
487
|
+
|
|
488
|
+
// src/video-encoder.ts

/**
 * Wraps a WebCodecs `VideoEncoder` with ordered chunk delivery and
 * backpressure. Requests a key frame every 40th submitted frame.
 *
 * Throws synchronously if `signal` is already aborted at creation time.
 */
var createVideoEncoder = ({
  onChunk,
  onError,
  signal,
  config
}) => {
  if (signal.aborted) {
    throw new Error("Not creating video encoder, already aborted");
  }
  // Chains onChunk() callbacks so encoded chunks are handled one at a time.
  let outputQueue = Promise.resolve();
  let outputQueueSize = 0;
  let dequeueResolver = () => {
  };
  const encoder = new VideoEncoder({
    error(error) {
      onError(error);
    },
    output(chunk) {
      outputQueueSize++;
      outputQueue = outputQueue.then(() => onChunk(chunk)).then(() => {
        outputQueueSize--;
        dequeueResolver();
        return Promise.resolve();
      });
    }
  });
  const close = () => {
    signal.removeEventListener("abort", onAbort);
    if (encoder.state === "closed") {
      return;
    }
    encoder.close();
  };
  const onAbort = () => {
    close();
  };
  signal.addEventListener("abort", onAbort);
  // Items in flight = frames queued inside the encoder + chunks still
  // awaiting their onChunk() callback.
  const getQueueSize = () => {
    return encoder.encodeQueueSize + outputQueueSize;
  };
  encoder.configure(config);
  let framesProcessed = 0;
  const waitForDequeue = async () => {
    await new Promise((r) => {
      dequeueResolver = r;
      encoder.addEventListener("dequeue", () => r(), {
        once: true
      });
    });
  };
  const waitForFinish = async () => {
    while (getQueueSize() > 0) {
      await waitForDequeue();
    }
  };
  const encodeFrame = async (frame) => {
    if (encoder.state === "closed") {
      return;
    }
    // Backpressure: don't let more than ~10 items pile up.
    while (getQueueSize() > 10) {
      await waitForDequeue();
    }
    // Re-check: the encoder may have been closed while waiting above.
    if (encoder.state === "closed") {
      return;
    }
    encoder.encode(frame, {
      // Force a key frame every 40 frames to bound seek granularity.
      keyFrame: framesProcessed % 40 === 0
    });
    framesProcessed++;
  };
  // Serializes encodeFrame() calls so frames are encoded in arrival order.
  let inputQueue = Promise.resolve();
  return {
    encodeFrame: (frame) => {
      inputQueue = inputQueue.then(() => encodeFrame(frame));
      return inputQueue;
    },
    waitForFinish: async () => {
      await encoder.flush();
      await waitForFinish();
      // Fix: await the output-callback chain last. It was previously awaited
      // before the drain loop, so chunks emitted while draining extended
      // `outputQueue` after it had already been awaited and their onChunk()
      // work could be missed. Matches createAudioEncoder's ordering.
      await outputQueue;
    },
    close,
    getQueueSize,
    flush: async () => {
      await encoder.flush();
    }
  };
};
|
|
577
|
+
|
|
578
|
+
// src/video-encoder-config.ts
// Resolves to an encoder config with a `hardwareAcceleration` preference
// attached — hardware if supported, otherwise software — or to null when
// VideoEncoder is unavailable or rejects both variants.
var getVideoEncoderConfig = async (config) => {
  if (typeof VideoEncoder === "undefined") {
    return null;
  }
  for (const hardwareAcceleration of ["prefer-hardware", "prefer-software"]) {
    const candidate = {...config, hardwareAcceleration};
    const {supported} = await VideoEncoder.isConfigSupported(candidate);
    if (supported) {
      return candidate;
    }
  }
  return null;
};
|
|
599
|
+
|
|
600
|
+
// src/on-video-track.ts

/**
 * Creates the parseMedia() video-track callback. Per track it decides
 * (via the resolver) whether to copy samples as-is, re-encode them through
 * a decoder→encoder pipeline, or drop the track, and returns the per-sample
 * handler — or null when the track is dropped or setup failed (in which
 * case the conversion has been aborted).
 */
var makeVideoTrackHandler = ({
  state,
  onVideoFrame,
  onMediaStateUpdate,
  abortConversion,
  convertMediaState,
  controller,
  videoCodec,
  onVideoTrack
}) => async (track) => {
  const videoEncoderConfig = await getVideoEncoderConfig({
    codec: videoCodec,
    height: track.displayAspectHeight,
    width: track.displayAspectWidth
  });
  const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
  const videoOperation = await resolveVideoAction({
    videoDecoderConfig,
    videoEncoderConfig,
    track,
    videoCodec,
    resolverFunction: onVideoTrack
  });
  if (videoOperation === "drop") {
    return null;
  }
  if (videoOperation === "copy") {
    // Copy path: remux samples directly, no decode/encode involved.
    const videoTrack = await state.addTrack({
      type: "video",
      color: track.color,
      width: track.codedWidth,
      height: track.codedHeight,
      codec: track.codecWithoutConfig,
      codecPrivate: track.codecPrivate
    });
    return async (sample) => {
      // Fix: await addSample() so muxer backpressure propagates — the audio
      // copy path awaits; here the promise was previously left floating.
      await state.addSample(new EncodedVideoChunk(sample), videoTrack.trackNumber);
      // Fix: count copied samples as encoded frames, consistent with the
      // audio copy path (previously incremented decodedVideoFrames, but
      // nothing is decoded on this path).
      convertMediaState.encodedVideoFrames++;
      onMediaStateUpdate?.({...convertMediaState});
    };
  }
  if (videoEncoderConfig === null) {
    abortConversion(new error_cause_default(`Could not configure video encoder of track ${track.trackId}`));
    return null;
  }
  if (videoDecoderConfig === null) {
    abortConversion(new error_cause_default(`Could not configure video decoder of track ${track.trackId}`));
    return null;
  }
  const {trackNumber} = await state.addTrack({
    type: "video",
    color: track.color,
    width: track.codedWidth,
    height: track.codedHeight,
    codec: videoCodec,
    codecPrivate: null
  });
  const videoEncoder = createVideoEncoder({
    onChunk: async (chunk) => {
      await state.addSample(chunk, trackNumber);
      convertMediaState.encodedVideoFrames++;
      onMediaStateUpdate?.({...convertMediaState});
    },
    onError: (err) => {
      abortConversion(new error_cause_default(`Video encoder of track ${track.trackId} failed (see .cause of this error)`, {
        cause: err
      }));
    },
    signal: controller.signal,
    config: videoEncoderConfig
  });
  const videoDecoder = createVideoDecoder({
    config: videoDecoderConfig,
    onFrame: async (frame) => {
      // Let the user observe/modify progress before handing to the encoder.
      await onVideoFrame?.(frame, track);
      await videoEncoder.encodeFrame(frame);
      convertMediaState.decodedVideoFrames++;
      onMediaStateUpdate?.({...convertMediaState});
      frame.close();
    },
    onError: (err) => {
      abortConversion(new error_cause_default(`Video decoder of track ${track.trackId} failed (see .cause of this error)`, {
        cause: err
      }));
    },
    signal: controller.signal
  });
  // Drain decoder then encoder before the container is finalized.
  state.addWaitForFinishPromise(async () => {
    await videoDecoder.waitForFinish();
    await videoEncoder.waitForFinish();
    videoDecoder.close();
    videoEncoder.close();
  });
  return async (chunk) => {
    await videoDecoder.processSample(chunk);
  };
};
|
|
698
|
+
|
|
699
|
+
// src/with-resolvers.ts
// Promise.withResolvers() ponyfill: returns a promise together with its
// resolve/reject functions so callers can settle it externally.
var withResolvers = function() {
  let resolve;
  let reject;
  const promise = new Promise((res, rej) => {
    [resolve, reject] = [res, rej];
  });
  return {promise, resolve, reject};
};
|
|
709
|
+
|
|
710
|
+
// src/convert-media.ts

// Converts a media file to WebM (VP8 video + Opus audio): parses `src`,
// hands each track to the copy/re-encode/drop handlers above, and resolves
// with `save`/`remove` handles for the produced output once muxing finishes.
var convertMedia = async ({
  src,
  onVideoFrame,
  onMediaStateUpdate,
  audioCodec,
  to,
  videoCodec,
  signal: userPassedAbortSignal,
  onAudioTrack: userAudioResolver,
  onVideoTrack: userVideoResolver
}) => {
  // Validate the requested output combination up-front.
  if (to !== "webm") {
    return Promise.reject(new TypeError('Only `to: "webm"` is supported currently'));
  }
  if (audioCodec !== "opus") {
    return Promise.reject(new TypeError('Only `audioCodec: "opus"` is supported currently'));
  }
  if (videoCodec !== "vp8") {
    return Promise.reject(new TypeError('Only `videoCodec: "vp8"` is supported currently'));
  }
  const {promise, resolve, reject} = withResolvers();
  // Internal controller fans the abort out to the decoders/encoders.
  const controller = new AbortController();
  const abortConversion = (errCause) => {
    reject(errCause);
    if (!controller.signal.aborted) {
      controller.abort();
    }
  };
  const onUserAbort = () => {
    abortConversion(new error_cause_default("Conversion aborted by user"));
  };
  userPassedAbortSignal?.addEventListener("abort", onUserAbort);
  // Progress counters, mutated by the track handlers.
  const convertMediaState = {
    decodedAudioFrames: 0,
    decodedVideoFrames: 0,
    encodedVideoFrames: 0,
    encodedAudioFrames: 0
  };
  const canUseWebFs = await canUseWebFsWriter();
  const writer = canUseWebFs ? webFsWriter : bufferWriter;
  const state = await MediaParserInternals.createMedia(writer);
  const onVideoTrack = makeVideoTrackHandler({
    state,
    onVideoFrame: onVideoFrame ?? null,
    onMediaStateUpdate: onMediaStateUpdate ?? null,
    abortConversion,
    convertMediaState,
    controller,
    videoCodec,
    onVideoTrack: userVideoResolver ?? defaultResolveVideoAction
  });
  const onAudioTrack = makeAudioTrackHandler({
    abortConversion,
    audioCodec,
    controller,
    convertMediaState,
    onMediaStateUpdate: onMediaStateUpdate ?? null,
    state,
    onAudioTrack: userAudioResolver ?? defaultResolveAudioAction,
    bitrate: 128000
  });
  const run = async () => {
    await parseMedia({
      src,
      onVideoTrack,
      onAudioTrack,
      signal: controller.signal
    });
    await state.waitForFinish();
    resolve({save: state.save, remove: state.remove});
  };
  run().catch((err) => {
    reject(err);
  }).finally(() => {
    // Always detach from the user's signal, on success and on failure.
    userPassedAbortSignal?.removeEventListener("abort", onUserAbort);
  });
  return promise;
};
|
|
787
|
+
export {
|
|
788
|
+
createVideoEncoder,
|
|
789
|
+
createVideoDecoder,
|
|
790
|
+
createAudioEncoder,
|
|
791
|
+
createAudioDecoder,
|
|
792
|
+
convertMedia
|
|
793
|
+
};
|