@remotion/webcodecs 4.0.227 → 4.0.228
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio-decoder.d.ts +3 -3
- package/dist/audio-decoder.js +23 -29
- package/dist/audio-encoder.d.ts +3 -2
- package/dist/audio-encoder.js +17 -29
- package/dist/auto-select-writer.d.ts +3 -0
- package/dist/auto-select-writer.js +20 -0
- package/dist/calculate-progress.d.ts +4 -0
- package/dist/calculate-progress.js +10 -0
- package/dist/convert-media.d.ts +11 -4
- package/dist/convert-media.js +60 -9
- package/dist/esm/index.mjs +339 -150
- package/dist/event-emitter.d.ts +25 -0
- package/dist/event-emitter.js +23 -0
- package/dist/io-manager/event-emitter.d.ts +27 -0
- package/dist/io-manager/event-emitter.js +24 -0
- package/dist/io-manager/io-synchronizer.d.ts +12 -0
- package/dist/io-manager/io-synchronizer.js +95 -0
- package/dist/log.d.ts +10 -0
- package/dist/log.js +6 -0
- package/dist/on-audio-track.d.ts +3 -2
- package/dist/on-audio-track.js +3 -1
- package/dist/on-video-track.d.ts +3 -2
- package/dist/on-video-track.js +8 -3
- package/dist/video-decoder.d.ts +3 -3
- package/dist/video-decoder.js +23 -28
- package/dist/video-encoder.d.ts +3 -2
- package/dist/video-encoder.js +26 -29
- package/dist/wait-until-return.d.ts +4 -0
- package/dist/wait-until-return.js +14 -0
- package/dist/with-resolvers.d.ts +5 -0
- package/dist/with-resolvers.js +16 -1
- package/package.json +3 -3
package/dist/audio-decoder.d.ts
CHANGED
```diff
@@ -1,14 +1,14 @@
-import type { AudioSample } from '@remotion/media-parser';
+import type { AudioSample, LogLevel } from '@remotion/media-parser';
 export type WebCodecsAudioDecoder = {
     processSample: (audioSample: AudioSample) => Promise<void>;
     waitForFinish: () => Promise<void>;
     close: () => void;
-    getQueueSize: () => number;
     flush: () => Promise<void>;
 };
-export declare const createAudioDecoder: ({ onFrame, onError, signal, config, }: {
+export declare const createAudioDecoder: ({ onFrame, onError, signal, config, logLevel, }: {
     onFrame: (frame: AudioData) => Promise<void>;
     onError: (error: DOMException) => void;
     signal: AbortSignal;
     config: AudioDecoderConfig;
+    logLevel: LogLevel;
 }) => WebCodecsAudioDecoder;
```
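For orientation, a minimal sketch of how the updated decoder factory would be called after this change: `logLevel` is now a required field and `getQueueSize()` is gone from the returned object. The import path and the Opus decoder config are illustrative assumptions, not something this diff shows.

```ts
import type {AudioSample} from '@remotion/media-parser';
// Assumed import path for illustration; the diff only shows dist/audio-decoder.d.ts.
import {createAudioDecoder} from '@remotion/webcodecs';

const abortController = new AbortController();

const decoder = createAudioDecoder({
	config: {codec: 'opus', numberOfChannels: 2, sampleRate: 48000},
	onFrame: async (frame: AudioData) => {
		// Consume the decoded frame, then release it.
		frame.close();
	},
	onError: (error: DOMException) => console.error(error),
	signal: abortController.signal,
	logLevel: 'info', // new required field in 4.0.228
});

// Samples come from @remotion/media-parser's onAudioTrack callback.
declare const sample: AudioSample;
await decoder.processSample(sample);
await decoder.waitForFinish();
decoder.close();
```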
package/dist/audio-decoder.js
CHANGED
```diff
@@ -1,22 +1,35 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createAudioDecoder = void 0;
-const createAudioDecoder = ({ onFrame, onError, signal, config, }) => {
+const io_synchronizer_1 = require("./io-manager/io-synchronizer");
+const createAudioDecoder = ({ onFrame, onError, signal, config, logLevel, }) => {
     if (signal.aborted) {
         throw new Error('Not creating audio decoder, already aborted');
     }
+    const ioSynchronizer = (0, io_synchronizer_1.makeIoSynchronizer)(logLevel, 'Audio decoder');
     let outputQueue = Promise.resolve();
-    let outputQueueSize = 0;
-    let dequeueResolver = () => { };
     const audioDecoder = new AudioDecoder({
         output(inputFrame) {
-
+            ioSynchronizer.onOutput(inputFrame.timestamp);
+            const abortHandler = () => {
+                inputFrame.close();
+            };
+            signal.addEventListener('abort', abortHandler, { once: true });
             outputQueue = outputQueue
-                .then(() => onFrame(inputFrame))
                 .then(() => {
-
-
+                if (signal.aborted) {
+                    return;
+                }
+                return onFrame(inputFrame);
+            })
+                .then(() => {
+                ioSynchronizer.onProcessed();
+                signal.removeEventListener('abort', abortHandler);
                 return Promise.resolve();
+            })
+                .catch((err) => {
+                inputFrame.close();
+                onError(err);
             });
         },
         error(error) {
@@ -35,34 +48,16 @@ const createAudioDecoder = ({ onFrame, onError, signal, config, }) => {
         close();
     };
     signal.addEventListener('abort', onAbort);
-    const getQueueSize = () => {
-        return audioDecoder.decodeQueueSize + outputQueueSize;
-    };
     audioDecoder.configure(config);
-    const waitForDequeue = async () => {
-        await new Promise((r) => {
-            dequeueResolver = r;
-            // @ts-expect-error exists
-            audioDecoder.addEventListener('dequeue', () => r(), {
-                once: true,
-            });
-        });
-    };
-    const waitForFinish = async () => {
-        while (getQueueSize() > 0) {
-            await waitForDequeue();
-        }
-    };
     const processSample = async (audioSample) => {
         if (audioDecoder.state === 'closed') {
             return;
         }
-
-            await waitForDequeue();
-        }
+        await ioSynchronizer.waitFor({ unemitted: 100, _unprocessed: 2 });
         // Don't flush, it messes up the audio
         const chunk = new EncodedAudioChunk(audioSample);
         audioDecoder.decode(chunk);
+        ioSynchronizer.inputItem(chunk.timestamp, audioSample.type === 'key');
     };
     let queue = Promise.resolve();
     return {
@@ -72,11 +67,10 @@ const createAudioDecoder = ({ onFrame, onError, signal, config, }) => {
         },
         waitForFinish: async () => {
             await audioDecoder.flush();
-            await waitForFinish();
+            await ioSynchronizer.waitForFinish();
             await outputQueue;
         },
         close,
-        getQueueSize,
         flush: async () => {
             await audioDecoder.flush();
         },
```
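This decoder (and the audio encoder below) replaces the old `dequeue`-event polling with a shared io-synchronizer; its source lives in `dist/io-manager/io-synchronizer.js`, which is not shown in this excerpt. The sketch below only illustrates the counting idea implied by the `inputItem` / `onOutput` / `onProcessed` / `waitFor` calls above; it is not the actual implementation, and the names are guesses.

```ts
// Illustrative sketch of an input/output synchronizer, NOT the real
// makeIoSynchronizer from ./io-manager/io-synchronizer.
const makeCountingSynchronizer = () => {
	let inputs = 0;
	let outputs = 0;
	let processed = 0;
	let waiters: Array<() => void> = [];

	const notify = () => {
		const current = waiters;
		waiters = [];
		current.forEach((w) => w());
	};

	return {
		inputItem: () => {
			inputs++;
		},
		onOutput: () => {
			outputs++;
			notify();
		},
		onProcessed: () => {
			processed++;
			notify();
		},
		// Backpressure: block the producer while too many items sit in the codec queue.
		waitFor: async ({unemitted}: {unemitted: number}) => {
			while (inputs - outputs > unemitted) {
				await new Promise<void>((resolve) => waiters.push(resolve));
			}
		},
		// Resolve once every queued item has been emitted and handled.
		waitForFinish: async () => {
			while (processed < inputs) {
				await new Promise<void>((resolve) => waiters.push(resolve));
			}
		},
	};
};
```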
package/dist/audio-encoder.d.ts
CHANGED
```diff
@@ -1,15 +1,16 @@
+import type { LogLevel } from '@remotion/media-parser';
 import type { ConvertMediaAudioCodec } from './codec-id';
 export type WebCodecsAudioEncoder = {
     encodeFrame: (audioData: AudioData) => Promise<void>;
     waitForFinish: () => Promise<void>;
     close: () => void;
-    getQueueSize: () => number;
     flush: () => Promise<void>;
 };
-export declare const createAudioEncoder: ({ onChunk, onError, codec, signal, config: audioEncoderConfig, }: {
+export declare const createAudioEncoder: ({ onChunk, onError, codec, signal, config: audioEncoderConfig, logLevel, }: {
     onChunk: (chunk: EncodedAudioChunk) => Promise<void>;
     onError: (error: DOMException) => void;
     codec: ConvertMediaAudioCodec;
     signal: AbortSignal;
     config: AudioEncoderConfig;
+    logLevel: LogLevel;
 }) => WebCodecsAudioEncoder;
```
package/dist/audio-encoder.js
CHANGED
```diff
@@ -1,22 +1,29 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createAudioEncoder = void 0;
-const createAudioEncoder = ({ onChunk, onError, codec, signal, config: audioEncoderConfig, }) => {
+const io_synchronizer_1 = require("./io-manager/io-synchronizer");
+const createAudioEncoder = ({ onChunk, onError, codec, signal, config: audioEncoderConfig, logLevel, }) => {
     if (signal.aborted) {
         throw new Error('Not creating audio encoder, already aborted');
     }
+    const ioSynchronizer = (0, io_synchronizer_1.makeIoSynchronizer)(logLevel, 'Audio encoder');
     let prom = Promise.resolve();
-    let outputQueue = 0;
-    let dequeueResolver = () => { };
     const encoder = new AudioEncoder({
         output: (chunk) => {
-
+            ioSynchronizer.onOutput(chunk.timestamp);
             prom = prom
-                .then(() => onChunk(chunk))
                 .then(() => {
-
-
+                if (signal.aborted) {
+                    return;
+                }
+                return onChunk(chunk);
+            })
+                .then(() => {
+                ioSynchronizer.onProcessed();
                 return Promise.resolve();
+            })
+                .catch((err) => {
+                onError(err);
             });
         },
         error(error) {
@@ -38,36 +45,18 @@ const createAudioEncoder = ({ onChunk, onError, codec, signal, config: audioEnco
     if (codec !== 'opus') {
         throw new Error('Only `codec: "opus"` is supported currently');
     }
-    const getQueueSize = () => {
-        return encoder.encodeQueueSize + outputQueue;
-    };
     encoder.configure(audioEncoderConfig);
-    const waitForDequeue = async () => {
-        await new Promise((r) => {
-            dequeueResolver = r;
-            // @ts-expect-error exists
-            encoder.addEventListener('dequeue', () => r(), {
-                once: true,
-            });
-        });
-    };
-    const waitForFinish = async () => {
-        while (getQueueSize() > 0) {
-            await waitForDequeue();
-        }
-    };
     const encodeFrame = async (audioData) => {
         if (encoder.state === 'closed') {
             return;
         }
-
-            await waitForDequeue();
-        }
+        await ioSynchronizer.waitFor({ unemitted: 2, _unprocessed: 2 });
         // @ts-expect-error - can have changed in the meanwhile
         if (encoder.state === 'closed') {
             return;
         }
         encoder.encode(audioData);
+        ioSynchronizer.inputItem(audioData.timestamp, true);
     };
     let queue = Promise.resolve();
     return {
@@ -77,11 +66,10 @@ const createAudioEncoder = ({ onChunk, onError, codec, signal, config: audioEnco
         },
         waitForFinish: async () => {
             await encoder.flush();
-            await waitForFinish();
+            await ioSynchronizer.waitForFinish();
             await prom;
         },
         close,
-        getQueueSize,
         flush: async () => {
             await encoder.flush();
         },
```
package/dist/auto-select-writer.js
ADDED
```diff
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.autoSelectWriter = void 0;
+const buffer_1 = require("@remotion/media-parser/buffer");
+const web_fs_1 = require("@remotion/media-parser/web-fs");
+const log_1 = require("./log");
+const autoSelectWriter = async (writer, logLevel) => {
+    if (writer) {
+        log_1.Log.verbose(logLevel, 'Using writer provided by user');
+        return writer;
+    }
+    log_1.Log.verbose(logLevel, 'Determining best writer');
+    if (await (0, web_fs_1.canUseWebFsWriter)()) {
+        log_1.Log.verbose(logLevel, 'Using WebFS writer because it is supported');
+        return web_fs_1.webFsWriter;
+    }
+    log_1.Log.verbose(logLevel, 'Using buffer writer because WebFS writer is not supported');
+    return buffer_1.bufferWriter;
+};
+exports.autoSelectWriter = autoSelectWriter;
```
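In TypeScript terms, the selection order added here is roughly the following (a sketch mirroring the compiled output above; the entry points and export names are taken from the `require` calls, while the `Log.verbose` calls are omitted):

```ts
import type {WriterInterface} from '@remotion/media-parser';
import {bufferWriter} from '@remotion/media-parser/buffer';
import {canUseWebFsWriter, webFsWriter} from '@remotion/media-parser/web-fs';

// Prefer a writer supplied by the caller, then the WebFS-backed writer,
// and fall back to the in-memory buffer writer.
export const pickWriter = async (
	writer?: WriterInterface,
): Promise<WriterInterface> => {
	if (writer) {
		return writer;
	}
	if (await canUseWebFsWriter()) {
		return webFsWriter;
	}
	return bufferWriter;
};
```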
package/dist/calculate-progress.js
ADDED
```diff
@@ -0,0 +1,10 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.calculateProgress = void 0;
+const calculateProgress = ({ millisecondsWritten, expectedOutputMilliseconds, }) => {
+    if (expectedOutputMilliseconds === null) {
+        return null;
+    }
+    return millisecondsWritten / expectedOutputMilliseconds;
+};
+exports.calculateProgress = calculateProgress;
```
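The helper maps written milliseconds to a 0-1 ratio, or `null` while the expected duration is still unknown. A small illustration with made-up numbers:

```ts
// TypeScript rendering of the helper above, exercised with hypothetical inputs.
const calculateProgress = ({
	millisecondsWritten,
	expectedOutputMilliseconds,
}: {
	millisecondsWritten: number;
	expectedOutputMilliseconds: number | null;
}): number | null => {
	if (expectedOutputMilliseconds === null) {
		return null;
	}
	return millisecondsWritten / expectedOutputMilliseconds;
};

calculateProgress({millisecondsWritten: 4500, expectedOutputMilliseconds: 18000}); // 0.25
calculateProgress({millisecondsWritten: 4500, expectedOutputMilliseconds: null}); // null - duration not parsed yet
```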
package/dist/convert-media.d.ts
CHANGED
```diff
@@ -1,4 +1,4 @@
-import type { VideoTrack } from '@remotion/media-parser';
+import type { LogLevel, Options, ParseMediaDynamicOptions, ParseMediaFields, ParseMediaOptions, VideoTrack, WriterInterface } from '@remotion/media-parser';
 import type { ConvertMediaAudioCodec, ConvertMediaVideoCodec } from './codec-id';
 import { type ResolveAudioActionFn } from './resolve-audio-action';
 import { type ResolveVideoActionFn } from './resolve-video-action';
@@ -7,14 +7,18 @@ export type ConvertMediaState = {
     decodedAudioFrames: number;
     encodedVideoFrames: number;
     encodedAudioFrames: number;
+    bytesWritten: number;
+    millisecondsWritten: number;
+    expectedOutputMilliseconds: number | null;
+    overallProgress: number | null;
 };
 export type ConvertMediaTo = 'webm';
 export type ConvertMediaResult = {
     save: () => Promise<File>;
     remove: () => Promise<void>;
 };
-export declare const convertMedia: ({ src, onVideoFrame, onMediaStateUpdate, audioCodec, to, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, }: {
-    src:
+export declare const convertMedia: <F extends Options<ParseMediaFields>>({ src, onVideoFrame, onMediaStateUpdate: onMediaStateDoNoCallDirectly, audioCodec, to, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, reader, fields, logLevel, writer, ...more }: {
+    src: ParseMediaOptions<F>["src"];
     to: ConvertMediaTo;
     onVideoFrame?: (inputFrame: VideoFrame, track: VideoTrack) => Promise<void>;
     onMediaStateUpdate?: (state: ConvertMediaState) => void;
@@ -23,4 +27,7 @@ export declare const convertMedia: ({ src, onVideoFrame, onMediaStateUpdate, aud
     signal?: AbortSignal;
     onAudioTrack?: ResolveAudioActionFn;
     onVideoTrack?: ResolveVideoActionFn;
-
+    reader?: ParseMediaOptions<F>["reader"];
+    logLevel?: LogLevel;
+    writer?: WriterInterface;
+} & ParseMediaDynamicOptions<F>) => Promise<ConvertMediaResult>;
```
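A sketch of how a caller could consume the new progress fields, based only on the declaration above; the source URL, codec choices and logging are illustrative:

```ts
import {convertMedia} from '@remotion/webcodecs';

await convertMedia({
	src: 'https://example.com/input.mp4', // placeholder source
	to: 'webm',
	videoCodec: 'vp8',
	audioCodec: 'opus',
	logLevel: 'verbose', // new optional option
	onMediaStateUpdate: (state) => {
		// bytesWritten, millisecondsWritten, expectedOutputMilliseconds and
		// overallProgress are new in 4.0.228.
		if (state.overallProgress !== null) {
			console.log(
				`${Math.round(state.overallProgress * 100)}% done, ${state.bytesWritten} bytes written`,
			);
		}
	},
});
```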
package/dist/convert-media.js
CHANGED
```diff
@@ -5,15 +5,18 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.convertMedia = void 0;
 const media_parser_1 = require("@remotion/media-parser");
-const
-const
+const auto_select_writer_1 = require("./auto-select-writer");
+const calculate_progress_1 = require("./calculate-progress");
 const error_cause_1 = __importDefault(require("./error-cause"));
 const on_audio_track_1 = require("./on-audio-track");
 const on_video_track_1 = require("./on-video-track");
 const resolve_audio_action_1 = require("./resolve-audio-action");
 const resolve_video_action_1 = require("./resolve-video-action");
 const with_resolvers_1 = require("./with-resolvers");
-const convertMedia = async ({ src, onVideoFrame, onMediaStateUpdate, audioCodec, to, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, })
+const convertMedia = async function ({ src, onVideoFrame, onMediaStateUpdate: onMediaStateDoNoCallDirectly, audioCodec, to, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, reader, fields, logLevel = 'info', writer, ...more }) {
+    if (userPassedAbortSignal === null || userPassedAbortSignal === void 0 ? void 0 : userPassedAbortSignal.aborted) {
+        return Promise.reject(new error_cause_1.default('Aborted'));
+    }
     if (to !== 'webm') {
         return Promise.reject(new TypeError('Only `to: "webm"` is supported currently'));
     }
@@ -23,7 +26,7 @@ const convertMedia = async ({ src, onVideoFrame, onMediaStateUpdate, audioCodec,
     if (videoCodec !== 'vp8' && videoCodec !== 'vp9') {
         return Promise.reject(new TypeError('Only `videoCodec: "vp8"` and `videoCodec: "vp9"` are supported currently'));
     }
-    const {
+    const { resolve, reject, getPromiseToImmediatelyReturn } = (0, with_resolvers_1.withResolversAndWaitForReturn)();
     const controller = new AbortController();
     const abortConversion = (errCause) => {
         reject(errCause);
@@ -40,9 +43,34 @@ const convertMedia = async ({ src, onVideoFrame, onMediaStateUpdate, audioCodec,
         decodedVideoFrames: 0,
         encodedVideoFrames: 0,
         encodedAudioFrames: 0,
+        bytesWritten: 0,
+        millisecondsWritten: 0,
+        expectedOutputMilliseconds: null,
+        overallProgress: 0,
+    };
+    const onMediaStateUpdate = (newState) => {
+        if (controller.signal.aborted) {
+            return;
+        }
+        onMediaStateDoNoCallDirectly === null || onMediaStateDoNoCallDirectly === void 0 ? void 0 : onMediaStateDoNoCallDirectly(newState);
     };
-    const
-
+    const state = await media_parser_1.MediaParserInternals.createMedia({
+        writer: await (0, auto_select_writer_1.autoSelectWriter)(writer, logLevel),
+        onBytesProgress: (bytesWritten) => {
+            convertMediaState.bytesWritten = bytesWritten;
+            onMediaStateUpdate === null || onMediaStateUpdate === void 0 ? void 0 : onMediaStateUpdate(convertMediaState);
+        },
+        onMillisecondsProgress: (millisecondsWritten) => {
+            if (millisecondsWritten > convertMediaState.millisecondsWritten) {
+                convertMediaState.millisecondsWritten = millisecondsWritten;
+                convertMediaState.overallProgress = (0, calculate_progress_1.calculateProgress)({
+                    millisecondsWritten: convertMediaState.millisecondsWritten,
+                    expectedOutputMilliseconds: convertMediaState.expectedOutputMilliseconds,
+                });
+                onMediaStateUpdate === null || onMediaStateUpdate === void 0 ? void 0 : onMediaStateUpdate(convertMediaState);
+            }
+        },
+    });
     const onVideoTrack = (0, on_video_track_1.makeVideoTrackHandler)({
         state,
         onVideoFrame: onVideoFrame !== null && onVideoFrame !== void 0 ? onVideoFrame : null,
@@ -52,6 +80,7 @@ const convertMedia = async ({ src, onVideoFrame, onMediaStateUpdate, audioCodec,
         controller,
         videoCodec,
         onVideoTrack: userVideoResolver !== null && userVideoResolver !== void 0 ? userVideoResolver : resolve_video_action_1.defaultResolveVideoAction,
+        logLevel,
     });
     const onAudioTrack = (0, on_audio_track_1.makeAudioTrackHandler)({
         abortConversion,
@@ -62,12 +91,35 @@ const convertMedia = async ({ src, onVideoFrame, onMediaStateUpdate, audioCodec,
         state,
         onAudioTrack: userAudioResolver !== null && userAudioResolver !== void 0 ? userAudioResolver : resolve_audio_action_1.defaultResolveAudioAction,
         bitrate: 128000,
+        logLevel,
     });
     (0, media_parser_1.parseMedia)({
         src,
         onVideoTrack,
         onAudioTrack,
         signal: controller.signal,
+        fields: {
+            ...fields,
+            durationInSeconds: true,
+        },
+        reader,
+        ...more,
+        onDurationInSeconds: (durationInSeconds) => {
+            if (durationInSeconds === null) {
+                return null;
+            }
+            const casted = more;
+            if (casted.onDurationInSeconds) {
+                casted.onDurationInSeconds(durationInSeconds);
+            }
+            const expectedOutputMilliseconds = durationInSeconds * 1000;
+            convertMediaState.expectedOutputMilliseconds = expectedOutputMilliseconds;
+            convertMediaState.overallProgress = (0, calculate_progress_1.calculateProgress)({
+                millisecondsWritten: convertMediaState.millisecondsWritten,
+                expectedOutputMilliseconds,
+            });
+            onMediaStateUpdate(convertMediaState);
+        },
     })
         .then(() => {
         return state.waitForFinish();
@@ -77,10 +129,9 @@ const convertMedia = async ({ src, onVideoFrame, onMediaStateUpdate, audioCodec,
     })
         .catch((err) => {
         reject(err);
-    })
-
+    });
+    return getPromiseToImmediatelyReturn().finally(() => {
         userPassedAbortSignal === null || userPassedAbortSignal === void 0 ? void 0 : userPassedAbortSignal.removeEventListener('abort', onUserAbort);
     });
-    return promise;
 };
 exports.convertMedia = convertMedia;
```
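Because `durationInSeconds: true` is now merged into `fields` and a caller-supplied `onDurationInSeconds` is forwarded before the progress state is updated, the duration stays observable to the caller. A hedged end-to-end sketch (the URL is a placeholder, and the set of forwardable callbacks follows `ParseMediaDynamicOptions` from @remotion/media-parser):

```ts
import {convertMedia} from '@remotion/webcodecs';

const result = await convertMedia({
	src: 'https://example.com/input.webm', // placeholder source
	to: 'webm',
	videoCodec: 'vp9',
	audioCodec: 'opus',
	onDurationInSeconds: (durationInSeconds) => {
		// Still called for the caller; convertMedia also uses the value
		// internally to derive expectedOutputMilliseconds.
		console.log('input duration:', durationInSeconds, 'seconds');
	},
});

// ConvertMediaResult is unchanged: save() resolves with the output file.
const file = await result.save();
console.log('output size:', file.size, 'bytes');
```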