@remotion/webcodecs 4.0.248 → 4.0.250
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/auto-select-writer.js +5 -0
- package/dist/can-reencode-video-track.d.ts +4 -1
- package/dist/can-reencode-video-track.js +11 -3
- package/dist/convert-media.d.ts +7 -2
- package/dist/convert-media.js +2 -1
- package/dist/create/matroska/matroska-utils.d.ts +1 -1
- package/dist/create/wav/create-wav.js +1 -1
- package/dist/default-on-video-track-handler.js +2 -0
- package/dist/esm/index.mjs +48 -10
- package/dist/esm/node-writer.mjs +92 -0
- package/dist/esm/node.mjs +71 -0
- package/dist/index.d.ts +1 -1
- package/dist/on-audio-track.d.ts +3 -1
- package/dist/on-audio-track.js +25 -4
- package/dist/select-container-creator.d.ts +1 -1
- package/dist/test/create-matroska.test.js +2 -0
- package/dist/test/remux-serverside.d.ts +1 -0
- package/dist/test/remux-serverside.js +12 -0
- package/dist/test/remux-serverside.test.d.ts +1 -0
- package/dist/test/remux-serverside.test.js +19 -0
- package/dist/test/stsd.test.js +4 -5
- package/dist/writers/fs.d.ts +4 -0
- package/dist/writers/fs.js +78 -0
- package/dist/writers/node-writer.d.ts +4 -0
- package/dist/writers/node-writer.js +77 -0
- package/dist/writers/node.d.ts +4 -0
- package/dist/writers/node.js +74 -0
- package/package.json +14 -5
package/dist/auto-select-writer.js
CHANGED
@@ -11,6 +11,11 @@ const autoSelectWriter = async (writer, logLevel) => {
         return writer;
     }
     log_1.Log.verbose(logLevel, 'Determining best writer');
+    const hasNavigator = typeof navigator !== 'undefined';
+    if (!hasNavigator) {
+        log_1.Log.verbose(logLevel, 'No navigator API detected, using buffer writer');
+        return buffer_1.bufferWriter;
+    }
     // Check if we're offline using the navigator API
     const isOffline = !navigator.onLine;
     if (isOffline) {
package/dist/can-reencode-video-track.d.ts
CHANGED
@@ -1,6 +1,9 @@
 import type { VideoTrack } from '@remotion/media-parser';
 import type { ConvertMediaVideoCodec } from './get-available-video-codecs';
-export declare const canReencodeVideoTrack: ({ videoCodec, track, }: {
+import type { ResizeOperation } from './resizing/mode';
+export declare const canReencodeVideoTrack: ({ videoCodec, track, resizeOperation, rotate, }: {
     videoCodec: ConvertMediaVideoCodec;
     track: VideoTrack;
+    resizeOperation: ResizeOperation | null;
+    rotate: number | null;
 }) => Promise<boolean>;
package/dist/can-reencode-video-track.js
CHANGED
@@ -1,13 +1,21 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.canReencodeVideoTrack = void 0;
+const rotation_1 = require("./rotation");
 const video_decoder_config_1 = require("./video-decoder-config");
 const video_encoder_config_1 = require("./video-encoder-config");
-const canReencodeVideoTrack = async ({ videoCodec, track, }) => {
-    const videoEncoderConfig = await (0, video_encoder_config_1.getVideoEncoderConfig)({
-        codec: videoCodec,
+const canReencodeVideoTrack = async ({ videoCodec, track, resizeOperation, rotate, }) => {
+    const { height, width } = (0, rotation_1.calculateNewDimensionsFromRotateAndScale)({
         height: track.displayAspectHeight,
+        resizeOperation,
+        rotation: rotate !== null && rotate !== void 0 ? rotate : 0,
+        videoCodec,
         width: track.displayAspectWidth,
+    });
+    const videoEncoderConfig = await (0, video_encoder_config_1.getVideoEncoderConfig)({
+        codec: videoCodec,
+        height,
+        width,
         fps: track.fps,
     });
     const videoDecoderConfig = await (0, video_decoder_config_1.getVideoDecoderConfigWithHardwareAcceleration)(track);
package/dist/convert-media.d.ts
CHANGED
@@ -2,7 +2,7 @@
  * Copyright (c) 2025 Remotion AG
  * For licensing, see: https://remotion.dev/docs/webcodecs#license
  */
-import type { LogLevel, Options, ParseMediaDynamicOptions, ParseMediaFields, ParseMediaOptions, VideoTrack } from '@remotion/media-parser';
+import type { AudioTrack, LogLevel, Options, ParseMediaDynamicOptions, ParseMediaFields, ParseMediaOptions, VideoTrack } from '@remotion/media-parser';
 import type { ConvertMediaAudioCodec } from './get-available-audio-codecs';
 import type { ConvertMediaContainer } from './get-available-containers';
 import type { ConvertMediaVideoCodec } from './get-available-video-codecs';
@@ -30,10 +30,15 @@ export type ConvertMediaOnVideoFrame = (options: {
     frame: VideoFrame;
     track: VideoTrack;
 }) => Promise<VideoFrame> | VideoFrame;
-export declare const convertMedia: <F extends Options<ParseMediaFields>>({ src, onVideoFrame, onProgress: onProgressDoNotCallDirectly, audioCodec, container, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, reader, fields, logLevel, writer, progressIntervalInMs, rotate, apiKey, resize, ...more }: {
+export type ConvertMediaOnAudioData = (options: {
+    audioData: AudioData;
+    track: AudioTrack;
+}) => Promise<AudioData> | AudioData;
+export declare const convertMedia: <F extends Options<ParseMediaFields>>({ src, onVideoFrame, onAudioData, onProgress: onProgressDoNotCallDirectly, audioCodec, container, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, reader, fields, logLevel, writer, progressIntervalInMs, rotate, apiKey, resize, ...more }: {
     src: ParseMediaOptions<F>["src"];
     container: ConvertMediaContainer;
     onVideoFrame?: ConvertMediaOnVideoFrame;
+    onAudioData?: ConvertMediaOnAudioData;
     onProgress?: ConvertMediaOnProgress;
     videoCodec?: ConvertMediaVideoCodec;
     audioCodec?: ConvertMediaAudioCodec;
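A usage sketch of the new onAudioData hook declared above (not part of the package diff; the source URL and the gain factor are placeholders, and the callback only modifies tracks that decode to interleaved 32-bit float audio, passing everything else through unchanged):

import {convertMedia} from '@remotion/webcodecs';

await convertMedia({
  src: 'https://example.com/input.mp4', // placeholder URL
  container: 'webm',
  onAudioData: ({audioData}) => {
    // convertMedia() rejects AudioData whose format, sample rate, channel count,
    // timestamp or duration differ from the input (see on-audio-track.js below),
    // so only touch interleaved float samples and return anything else as-is.
    if (audioData.format !== 'f32') {
      return audioData;
    }
    const samples = new Float32Array(audioData.numberOfFrames * audioData.numberOfChannels);
    audioData.copyTo(samples, {planeIndex: 0});
    for (let i = 0; i < samples.length; i++) {
      samples[i] *= 0.5; // halve the volume (arbitrary example)
    }
    return new AudioData({
      data: samples,
      format: 'f32',
      numberOfChannels: audioData.numberOfChannels,
      numberOfFrames: audioData.numberOfFrames,
      sampleRate: audioData.sampleRate,
      timestamp: audioData.timestamp,
    });
  },
});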
package/dist/convert-media.js
CHANGED
@@ -21,7 +21,7 @@ const on_video_track_1 = require("./on-video-track");
 const select_container_creator_1 = require("./select-container-creator");
 const send_telemetry_event_1 = require("./send-telemetry-event");
 const throttled_state_update_1 = require("./throttled-state-update");
-const convertMedia = async function ({ src, onVideoFrame, onProgress: onProgressDoNotCallDirectly, audioCodec, container, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, reader, fields, logLevel = 'info', writer, progressIntervalInMs, rotate, apiKey, resize, ...more }) {
+const convertMedia = async function ({ src, onVideoFrame, onAudioData, onProgress: onProgressDoNotCallDirectly, audioCodec, container, videoCodec, signal: userPassedAbortSignal, onAudioTrack: userAudioResolver, onVideoTrack: userVideoResolver, reader, fields, logLevel = 'info', writer, progressIntervalInMs, rotate, apiKey, resize, ...more }) {
     var _a, _b;
     if (userPassedAbortSignal === null || userPassedAbortSignal === void 0 ? void 0 : userPassedAbortSignal.aborted) {
         return Promise.reject(new error_cause_1.default('Aborted'));
@@ -106,6 +106,7 @@ const convertMedia = async function ({ src, onVideoFrame, onProgress: onProgress
         logLevel,
         outputContainer: container,
         progressTracker,
+        onAudioData: onAudioData !== null && onAudioData !== void 0 ? onAudioData : null,
     });
     (0, media_parser_1.parseMedia)({
         logLevel,
package/dist/create/matroska/matroska-utils.d.ts
CHANGED
@@ -19,7 +19,7 @@ export type EbmlParsedOrUint8Array<T extends Ebml> = {
     value: EbmlValueOrUint8Array<T>;
     minVintWidth: number | null;
 };
-export declare const measureEBMLVarInt: (value: number) => 2 | 1 |
+export declare const measureEBMLVarInt: (value: number) => 2 | 1 | 6 | 5 | 3 | 4;
 export declare const getVariableInt: (value: number, minWidth: number | null) => Uint8Array;
 export declare const makeMatroskaBytes: (fields: PossibleEbmlOrUint8Array) => BytesAndOffset;
 export type PossibleEbmlOrUint8Array = Prettify<{
package/dist/create/wav/create-wav.js
CHANGED
@@ -57,7 +57,7 @@ const createWav = async ({ filename, logLevel, onBytesProgress, onMillisecondsPr
     };
     const addSample = async (chunk) => {
         var _a;
-        log_1.Log.
+        log_1.Log.trace(logLevel, 'Adding sample', chunk);
         await w.write(chunk.data);
         onMillisecondsProgress((chunk.timestamp + ((_a = chunk.duration) !== null && _a !== void 0 ? _a : 0)) / 1000);
         onBytesProgress(w.getWrittenByteCount());
package/dist/default-on-video-track-handler.js
CHANGED
@@ -15,6 +15,8 @@ const defaultOnVideoTrackHandler = async ({ track, defaultVideoCodec, logLevel,
     const canReencode = await (0, can_reencode_video_track_1.canReencodeVideoTrack)({
         videoCodec: defaultVideoCodec,
         track,
+        resizeOperation,
+        rotate,
     });
     if (canReencode) {
         media_parser_1.MediaParserInternals.Log.verbose(logLevel, `Track ${track.trackId} (video): Cannot copy, but re-enconde, therefore re-encoding`);
package/dist/esm/index.mjs
CHANGED
@@ -1082,12 +1082,21 @@ var getVideoEncoderConfig = async ({
 // src/can-reencode-video-track.ts
 var canReencodeVideoTrack = async ({
   videoCodec,
-  track
+  track,
+  resizeOperation,
+  rotate
 }) => {
+  const { height, width } = calculateNewDimensionsFromRotateAndScale({
+    height: track.displayAspectHeight,
+    resizeOperation,
+    rotation: rotate ?? 0,
+    videoCodec,
+    width: track.displayAspectWidth
+  });
   const videoEncoderConfig = await getVideoEncoderConfig({
     codec: videoCodec,
-    height: track.displayAspectHeight,
-    width: track.displayAspectWidth,
+    height,
+    width,
     fps: track.fps
   });
   const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
@@ -1103,6 +1112,11 @@ var autoSelectWriter = async (writer, logLevel) => {
     return writer;
   }
   Log.verbose(logLevel, "Determining best writer");
+  const hasNavigator = typeof navigator !== "undefined";
+  if (!hasNavigator) {
+    Log.verbose(logLevel, "No navigator API detected, using buffer writer");
+    return bufferWriter;
+  }
   const isOffline = !navigator.onLine;
   if (isOffline) {
     Log.verbose(logLevel, "Offline mode detected, using buffer writer");
@@ -1288,7 +1302,8 @@ var makeAudioTrackHandler = ({
   onAudioTrack,
   logLevel,
   outputContainer,
-  progressTracker
+  progressTracker,
+  onAudioData
 }) => async ({ track, container: inputContainer }) => {
   const canCopyTrack = canCopyAudioTrack({
     inputCodec: track.codecWithoutConfig,
@@ -1397,15 +1412,34 @@ var makeAudioTrackHandler = ({
     progressTracker
   });
   const audioDecoder = createAudioDecoder({
-    onFrame: async (
-      await
+    onFrame: async (audioData) => {
+      const newAudioData = onAudioData ? await onAudioData?.({ audioData, track }) : audioData;
+      if (newAudioData !== audioData) {
+        if (newAudioData.duration !== audioData.duration) {
+          throw new error_cause_default(`onAudioData returned a different duration than the input audio data. Original duration: ${audioData.duration}, new duration: ${newAudioData.duration}`);
+        }
+        if (newAudioData.numberOfChannels !== audioData.numberOfChannels) {
+          throw new error_cause_default(`onAudioData returned a different number of channels than the input audio data. Original channels: ${audioData.numberOfChannels}, new channels: ${newAudioData.numberOfChannels}`);
+        }
+        if (newAudioData.sampleRate !== audioData.sampleRate) {
+          throw new error_cause_default(`onAudioData returned a different sample rate than the input audio data. Original sample rate: ${audioData.sampleRate}, new sample rate: ${newAudioData.sampleRate}`);
+        }
+        if (newAudioData.format !== audioData.format) {
+          throw new error_cause_default(`onAudioData returned a different format than the input audio data. Original format: ${audioData.format}, new format: ${newAudioData.format}`);
+        }
+        if (newAudioData.timestamp !== audioData.timestamp) {
+          throw new error_cause_default(`onAudioData returned a different timestamp than the input audio data. Original timestamp: ${audioData.timestamp}, new timestamp: ${newAudioData.timestamp}`);
+        }
+        audioData.close();
+      }
+      await audioEncoder.encodeFrame(newAudioData);
       onMediaStateUpdate?.((prevState) => {
         return {
           ...prevState,
           decodedAudioFrames: prevState.decodedAudioFrames + 1
         };
       });
-
+      newAudioData.close();
     },
     onError(error) {
       abortConversion(new error_cause_default(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
@@ -1454,7 +1488,9 @@ var defaultOnVideoTrackHandler = async ({
   }
   const canReencode = await canReencodeVideoTrack({
     videoCodec: defaultVideoCodec,
-    track
+    track,
+    resizeOperation,
+    rotate
   });
   if (canReencode) {
     MediaParserInternals4.Log.verbose(logLevel, `Track ${track.trackId} (video): Cannot copy, but re-enconde, therefore re-encoding`);
@@ -4134,7 +4170,7 @@ var createWav = async ({
     await w.updateDataAt(blockAlignPosition, new Uint8Array(numberTo16BitLittleEndian(numberOfChannels * BYTES_PER_SAMPLE)));
   };
   const addSample = async (chunk) => {
-    Log.
+    Log.trace(logLevel, "Adding sample", chunk);
     await w.write(chunk.data);
     onMillisecondsProgress((chunk.timestamp + (chunk.duration ?? 0)) / 1000);
     onBytesProgress(w.getWrittenByteCount());
@@ -4273,6 +4309,7 @@ var throttledStateUpdate = ({
 var convertMedia = async function({
   src,
   onVideoFrame,
+  onAudioData,
   onProgress: onProgressDoNotCallDirectly,
   audioCodec,
   container,
@@ -4370,7 +4407,8 @@ var convertMedia = async function({
     onAudioTrack: userAudioResolver ?? null,
     logLevel,
     outputContainer: container,
-    progressTracker
+    progressTracker,
+    onAudioData: onAudioData ?? null
   });
   parseMedia({
     logLevel,
package/dist/esm/node-writer.mjs
ADDED
@@ -0,0 +1,92 @@
+var __create = Object.create;
+var __getProtoOf = Object.getPrototypeOf;
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __toESM = (mod, isNodeMode, target) => {
+  target = mod != null ? __create(__getProtoOf(mod)) : {};
+  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
+  for (let key of __getOwnPropNames(mod))
+    if (!__hasOwnProp.call(to, key))
+      __defProp(to, key, {
+        get: () => mod[key],
+        enumerable: true
+      });
+  return to;
+};
+var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
+
+// src/writers/node-writer.ts
+var { default: fs} = (() => ({}));
+var createContent = (filename) => {
+  return async () => {
+    const writPromise = Promise.resolve();
+    const remove = async () => {
+      await fs.promises.unlink(filename).catch(() => {
+      });
+    };
+    await remove();
+    const writeStream = await new Promise((resolve, reject) => {
+      fs.open(filename, "w", (err, fd) => {
+        if (err) {
+          reject(err);
+          return;
+        }
+        resolve(fd);
+      });
+    });
+    let written = 0;
+    const write = async (arr) => {
+      await new Promise((resolve, reject) => {
+        fs.write(writeStream, arr, (err) => {
+          if (err) {
+            reject(err);
+          }
+          resolve();
+        });
+      });
+      written += arr.byteLength;
+    };
+    const updateDataAt = async (position, data) => {
+      return new Promise((resolve, reject) => {
+        fs.write(writeStream, data, 0, data.length, position, (err) => {
+          if (err) {
+            reject(err);
+          }
+          resolve();
+        });
+      });
+    };
+    const writer = {
+      write: (arr) => {
+        writPromise.then(() => write(arr));
+        return writPromise;
+      },
+      updateDataAt: (position, data) => {
+        writPromise.then(() => updateDataAt(position, data));
+        return writPromise;
+      },
+      waitForFinish: async () => {
+        await writPromise;
+      },
+      getWrittenByteCount: () => written,
+      remove,
+      save: async () => {
+        try {
+          fs.closeSync(writeStream);
+          const file = await fs.promises.readFile(filename);
+          return new Blob([file]);
+        } catch (e) {
+          return Promise.reject(e);
+        }
+      }
+    };
+    return writer;
+  };
+};
+var nodeWriter = (path) => {
+  return { createContent: createContent(path) };
+};
+export {
+  nodeWriter
+};
package/dist/esm/node.mjs
ADDED
@@ -0,0 +1,71 @@
+// src/writers/node.ts
+import fs from "node:fs";
+var createContent = (filename) => {
+  return async () => {
+    const writPromise = Promise.resolve();
+    const remove = async () => {
+      await fs.promises.unlink(filename).catch(() => {
+      });
+    };
+    await remove();
+    if (!fs.existsSync(filename)) {
+      fs.writeFileSync(filename, "");
+    }
+    const writeStream = fs.openSync(filename, "w");
+    let written = 0;
+    const write = async (data) => {
+      await new Promise((resolve, reject) => {
+        fs.write(writeStream, data, 0, data.length, undefined, (err) => {
+          if (err) {
+            reject(err);
+            return;
+          }
+          resolve();
+        });
+      });
+      written += data.byteLength;
+    };
+    const updateDataAt = (position, data) => {
+      return new Promise((resolve, reject) => {
+        fs.write(writeStream, data, 0, data.length, position, (err) => {
+          if (err) {
+            reject(err);
+            return;
+          }
+          resolve();
+        });
+      });
+    };
+    const writer = {
+      write: (arr) => {
+        writPromise.then(() => write(arr));
+        return writPromise;
+      },
+      updateDataAt: (position, data) => {
+        writPromise.then(() => updateDataAt(position, data));
+        return writPromise;
+      },
+      waitForFinish: async () => {
+        await writPromise;
+      },
+      getWrittenByteCount: () => written,
+      remove,
+      save: async () => {
+        try {
+          fs.closeSync(writeStream);
+          const file = await fs.promises.readFile(filename);
+          return new Blob([file]);
+        } catch (e) {
+          return Promise.reject(e);
+        }
+      }
+    };
+    return writer;
+  };
+};
+var nodeWriter = (path) => {
+  return { createContent: createContent(path) };
+};
+export {
+  nodeWriter
+};
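For orientation, the contract these new writer modules satisfy, inferred only from the code above (the canonical writer type is defined in @remotion/media-parser and is not part of this diff; the names below are descriptive placeholders):

// Sketch of the writer shape returned by nodeWriter().createContent(), as
// inferred from node.mjs above. Not the package's own type declarations.
type InferredWriter = {
  write: (data: Uint8Array) => Promise<void>;
  updateDataAt: (position: number, data: Uint8Array) => Promise<void>;
  waitForFinish: () => Promise<void>;
  getWrittenByteCount: () => number;
  remove: () => Promise<void>;
  save: () => Promise<Blob>;
};

type InferredWriterInterface = {
  createContent: () => Promise<InferredWriter>;
};

declare function nodeWriter(path: string): InferredWriterInterface;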
package/dist/index.d.ts
CHANGED
@@ -4,7 +4,7 @@ export { canCopyAudioTrack } from './can-copy-audio-track';
 export { canCopyVideoTrack } from './can-copy-video-track';
 export { canReencodeAudioTrack } from './can-reencode-audio-track';
 export { canReencodeVideoTrack } from './can-reencode-video-track';
-export { convertMedia, ConvertMediaOnProgress, ConvertMediaOnVideoFrame, ConvertMediaProgress, ConvertMediaResult, } from './convert-media';
+export { convertMedia, ConvertMediaOnAudioData, ConvertMediaOnProgress, ConvertMediaOnVideoFrame, ConvertMediaProgress, ConvertMediaResult, } from './convert-media';
 export { defaultOnAudioTrackHandler } from './default-on-audio-track-handler';
 export { defaultOnVideoTrackHandler } from './default-on-video-track-handler';
 export { ConvertMediaAudioCodec, getAvailableAudioCodecs, } from './get-available-audio-codecs';
package/dist/on-audio-track.d.ts
CHANGED
@@ -1,4 +1,5 @@
 import { type LogLevel, type OnAudioTrack } from '@remotion/media-parser';
+import type { ConvertMediaOnAudioData } from './convert-media';
 import type { MediaFn } from './create/media-fn';
 import type { ProgressTracker } from './create/progress-tracker';
 import Error from './error-cause';
@@ -6,7 +7,7 @@ import type { ConvertMediaAudioCodec } from './get-available-audio-codecs';
 import type { ConvertMediaContainer } from './get-available-containers';
 import type { ConvertMediaOnAudioTrackHandler } from './on-audio-track-handler';
 import type { ConvertMediaProgressFn } from './throttled-state-update';
-export declare const makeAudioTrackHandler: ({ state, defaultAudioCodec: audioCodec, controller, abortConversion, onMediaStateUpdate, onAudioTrack, logLevel, outputContainer, progressTracker, }: {
+export declare const makeAudioTrackHandler: ({ state, defaultAudioCodec: audioCodec, controller, abortConversion, onMediaStateUpdate, onAudioTrack, logLevel, outputContainer, progressTracker, onAudioData, }: {
     state: MediaFn;
     defaultAudioCodec: ConvertMediaAudioCodec | null;
     controller: AbortController;
@@ -16,4 +17,5 @@ export declare const makeAudioTrackHandler: ({ state, defaultAudioCodec: audioCo
     logLevel: LogLevel;
     outputContainer: ConvertMediaContainer;
     progressTracker: ProgressTracker;
+    onAudioData: ConvertMediaOnAudioData | null;
 }) => OnAudioTrack;
package/dist/on-audio-track.js
CHANGED
@@ -15,7 +15,7 @@ const default_on_audio_track_handler_1 = require("./default-on-audio-track-handl
 const error_cause_1 = __importDefault(require("./error-cause"));
 const get_default_audio_codec_1 = require("./get-default-audio-codec");
 const log_1 = require("./log");
-const makeAudioTrackHandler = ({ state, defaultAudioCodec: audioCodec, controller, abortConversion, onMediaStateUpdate, onAudioTrack, logLevel, outputContainer, progressTracker, }) => async ({ track, container: inputContainer }) => {
+const makeAudioTrackHandler = ({ state, defaultAudioCodec: audioCodec, controller, abortConversion, onMediaStateUpdate, onAudioTrack, logLevel, outputContainer, progressTracker, onAudioData, }) => async ({ track, container: inputContainer }) => {
     const canCopyTrack = (0, can_copy_audio_track_1.canCopyAudioTrack)({
         inputCodec: track.codecWithoutConfig,
         outputContainer,
@@ -131,15 +131,36 @@ const makeAudioTrackHandler = ({ state, defaultAudioCodec: audioCodec, controlle
         progressTracker,
     });
     const audioDecoder = (0, audio_decoder_1.createAudioDecoder)({
-        onFrame: async (
-
+        onFrame: async (audioData) => {
+            const newAudioData = onAudioData
+                ? await (onAudioData === null || onAudioData === void 0 ? void 0 : onAudioData({ audioData, track }))
+                : audioData;
+            if (newAudioData !== audioData) {
+                if (newAudioData.duration !== audioData.duration) {
+                    throw new error_cause_1.default(`onAudioData returned a different duration than the input audio data. Original duration: ${audioData.duration}, new duration: ${newAudioData.duration}`);
+                }
+                if (newAudioData.numberOfChannels !== audioData.numberOfChannels) {
+                    throw new error_cause_1.default(`onAudioData returned a different number of channels than the input audio data. Original channels: ${audioData.numberOfChannels}, new channels: ${newAudioData.numberOfChannels}`);
+                }
+                if (newAudioData.sampleRate !== audioData.sampleRate) {
+                    throw new error_cause_1.default(`onAudioData returned a different sample rate than the input audio data. Original sample rate: ${audioData.sampleRate}, new sample rate: ${newAudioData.sampleRate}`);
+                }
+                if (newAudioData.format !== audioData.format) {
+                    throw new error_cause_1.default(`onAudioData returned a different format than the input audio data. Original format: ${audioData.format}, new format: ${newAudioData.format}`);
+                }
+                if (newAudioData.timestamp !== audioData.timestamp) {
+                    throw new error_cause_1.default(`onAudioData returned a different timestamp than the input audio data. Original timestamp: ${audioData.timestamp}, new timestamp: ${newAudioData.timestamp}`);
+                }
+                audioData.close();
+            }
+            await audioEncoder.encodeFrame(newAudioData);
             onMediaStateUpdate === null || onMediaStateUpdate === void 0 ? void 0 : onMediaStateUpdate((prevState) => {
                 return {
                     ...prevState,
                     decodedAudioFrames: prevState.decodedAudioFrames + 1,
                 };
             });
-
+            newAudioData.close();
         },
         onError(error) {
             abortConversion(new error_cause_1.default(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
package/dist/select-container-creator.d.ts
CHANGED
@@ -1,2 +1,2 @@
 import type { ConvertMediaContainer } from './get-available-containers';
-export declare const selectContainerCreator: (container: ConvertMediaContainer) => ({
+export declare const selectContainerCreator: (container: ConvertMediaContainer) => ({ filename, logLevel, onBytesProgress, onMillisecondsProgress, writer, progressTracker, }: import("./create/media-fn").MediaFnGeneratorInput) => Promise<import("./create/media-fn").MediaFn>;
package/dist/test/create-matroska.test.js
CHANGED
@@ -12,6 +12,8 @@ const state = media_parser_1.MediaParserInternals.makeParserState({
     onAudioTrack: null,
     onVideoTrack: null,
     supportsContentRange: true,
+    contentLength: null,
+    logLevel: 'info',
 });
 (0, bun_test_1.test)('Should make Matroska header that is same as input', async () => {
     const headerOutput = (0, matroska_utils_1.makeMatroskaBytes)({
package/dist/test/remux-serverside.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/test/remux-serverside.js
ADDED
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const node_1 = require("@remotion/media-parser/node");
+const bun_test_1 = require("bun:test");
+const convert_media_1 = require("../convert-media");
+(0, bun_test_1.test)('should be able to remux on server side', async () => {
+    await (0, convert_media_1.convertMedia)({
+        src: '/Users/jonathanburger/Downloads/odaje_glitch.mov',
+        reader: node_1.nodeReader,
+        container: 'mp4',
+    });
+});
package/dist/test/remux-serverside.test.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/test/remux-serverside.test.js
ADDED
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const example_videos_1 = require("@remotion/example-videos");
+const node_1 = require("@remotion/media-parser/node");
+const bun_test_1 = require("bun:test");
+const node_fs_1 = require("node:fs");
+const convert_media_1 = require("../convert-media");
+const node_2 = require("../writers/node");
+(0, bun_test_1.test)('should be able to remux server side', async () => {
+    const { save } = await (0, convert_media_1.convertMedia)({
+        src: example_videos_1.exampleVideos.bigBuckBunny,
+        reader: node_1.nodeReader,
+        container: 'mp4',
+        writer: (0, node_2.nodeWriter)('outputbun.mp4'),
+    });
+    const data = await save();
+    (0, bun_test_1.expect)(data.size).toBe(15306323);
+    (0, node_fs_1.unlinkSync)('outputbun.mp4');
+});
package/dist/test/stsd.test.js
CHANGED
@@ -47,9 +47,9 @@ const bun_test_1 = require("bun:test");
         getIterator: () => null,
         fields: {},
         supportsContentRange: true,
+        contentLength: null,
+        logLevel: 'info',
     }),
-    signal: null,
-    fields: {},
 });
 (0, bun_test_1.expect)(parsed).toEqual({
     offset: 0,
@@ -214,10 +214,9 @@ const bun_test_1 = require("bun:test");
            structure: true,
        },
        supportsContentRange: true,
+       contentLength: null,
+       logLevel: 'info',
    }),
-   signal: null,
-   logLevel: 'info',
-   fields: {},
 });
 (0, bun_test_1.expect)(parsed.sample).toEqual({
     size: 158,
package/dist/writers/fs.js
ADDED
@@ -0,0 +1,78 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fsWriter = void 0;
+const fs_1 = __importDefault(require("fs"));
+const createContent = (filename) => {
+    return async () => {
+        let writPromise = Promise.resolve();
+        const remove = () => {
+            try {
+                fs_1.default.unlinkSync(filename);
+            }
+            catch (_a) { }
+            return Promise.resolve();
+        };
+        await remove();
+        if (!fs_1.default.existsSync(filename)) {
+            fs_1.default.writeFileSync(filename, '');
+        }
+        const writeStream = fs_1.default.openSync(filename, 'w');
+        let written = 0;
+        const write = async (data) => {
+            await new Promise((resolve, reject) => {
+                fs_1.default.write(writeStream, data, 0, data.byteLength, written, (err) => {
+                    if (err) {
+                        reject(err);
+                        return;
+                    }
+                    resolve();
+                });
+            });
+            written += data.byteLength;
+        };
+        const updateDataAt = (position, data) => {
+            return new Promise((resolve, reject) => {
+                fs_1.default.write(writeStream, data, 0, data.byteLength, position, (err) => {
+                    if (err) {
+                        reject(err);
+                        return;
+                    }
+                    resolve();
+                });
+            });
+        };
+        const writer = {
+            write: (arr) => {
+                writPromise = writPromise.then(() => write(arr));
+                return writPromise;
+            },
+            updateDataAt: (position, data) => {
+                writPromise = writPromise.then(() => updateDataAt(position, data));
+                return writPromise;
+            },
+            waitForFinish: async () => {
+                await writPromise;
+            },
+            getWrittenByteCount: () => written,
+            remove,
+            save: async () => {
+                try {
+                    fs_1.default.closeSync(writeStream);
+                    const file = await fs_1.default.promises.readFile(filename);
+                    return new Blob([file]);
+                }
+                catch (e) {
+                    return Promise.reject(e);
+                }
+            },
+        };
+        return writer;
+    };
+};
+const fsWriter = (path) => {
+    return { createContent: createContent(path) };
+};
+exports.fsWriter = fsWriter;
package/dist/writers/node-writer.js
ADDED
@@ -0,0 +1,77 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.nodeWriter = void 0;
+const node_fs_1 = __importDefault(require("node:fs"));
+const createContent = (filename) => {
+    return async () => {
+        const writPromise = Promise.resolve();
+        const remove = async () => {
+            await node_fs_1.default.promises.unlink(filename).catch(() => { });
+        };
+        await remove();
+        const writeStream = await new Promise((resolve, reject) => {
+            node_fs_1.default.open(filename, 'w', (err, fd) => {
+                if (err) {
+                    reject(err);
+                    return;
+                }
+                resolve(fd);
+            });
+        });
+        let written = 0;
+        const write = async (arr) => {
+            await new Promise((resolve, reject) => {
+                node_fs_1.default.write(writeStream, arr, (err) => {
+                    if (err) {
+                        reject(err);
+                    }
+                    resolve();
+                });
+            });
+            written += arr.byteLength;
+        };
+        const updateDataAt = async (position, data) => {
+            return new Promise((resolve, reject) => {
+                node_fs_1.default.write(writeStream, data, 0, data.length, position, (err) => {
+                    if (err) {
+                        reject(err);
+                    }
+                    resolve();
+                });
+            });
+        };
+        const writer = {
+            write: (arr) => {
+                writPromise.then(() => write(arr));
+                return writPromise;
+            },
+            updateDataAt: (position, data) => {
+                writPromise.then(() => updateDataAt(position, data));
+                return writPromise;
+            },
+            waitForFinish: async () => {
+                await writPromise;
+            },
+            getWrittenByteCount: () => written,
+            remove,
+            save: async () => {
+                try {
+                    node_fs_1.default.closeSync(writeStream);
+                    const file = await node_fs_1.default.promises.readFile(filename);
+                    return new Blob([file]);
+                }
+                catch (e) {
+                    return Promise.reject(e);
+                }
+            },
+        };
+        return writer;
+    };
+};
+const nodeWriter = (path) => {
+    return { createContent: createContent(path) };
+};
+exports.nodeWriter = nodeWriter;
package/dist/writers/node.js
ADDED
@@ -0,0 +1,74 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.nodeWriter = void 0;
+const node_fs_1 = __importDefault(require("node:fs"));
+const createContent = (filename) => {
+    return async () => {
+        const writPromise = Promise.resolve();
+        const remove = async () => {
+            await node_fs_1.default.promises.unlink(filename).catch(() => { });
+        };
+        await remove();
+        if (!node_fs_1.default.existsSync(filename)) {
+            node_fs_1.default.writeFileSync(filename, '');
+        }
+        const writeStream = node_fs_1.default.openSync(filename, 'w');
+        let written = 0;
+        const write = async (data) => {
+            await new Promise((resolve, reject) => {
+                node_fs_1.default.write(writeStream, data, 0, data.length, undefined, (err) => {
+                    if (err) {
+                        reject(err);
+                        return;
+                    }
+                    resolve();
+                });
+            });
+            written += data.byteLength;
+        };
+        const updateDataAt = (position, data) => {
+            return new Promise((resolve, reject) => {
+                node_fs_1.default.write(writeStream, data, 0, data.length, position, (err) => {
+                    if (err) {
+                        reject(err);
+                        return;
+                    }
+                    resolve();
+                });
+            });
+        };
+        const writer = {
+            write: (arr) => {
+                writPromise.then(() => write(arr));
+                return writPromise;
+            },
+            updateDataAt: (position, data) => {
+                writPromise.then(() => updateDataAt(position, data));
+                return writPromise;
+            },
+            waitForFinish: async () => {
+                await writPromise;
+            },
+            getWrittenByteCount: () => written,
+            remove,
+            save: async () => {
+                try {
+                    node_fs_1.default.closeSync(writeStream);
+                    const file = await node_fs_1.default.promises.readFile(filename);
+                    return new Blob([file]);
+                }
+                catch (e) {
+                    return Promise.reject(e);
+                }
+            },
+        };
+        return writer;
+    };
+};
+const nodeWriter = (path) => {
+    return { createContent: createContent(path) };
+};
+exports.nodeWriter = nodeWriter;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@remotion/webcodecs",
-  "version": "4.0.248",
+  "version": "4.0.250",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "module": "dist/esm/index.mjs",
@@ -17,15 +17,15 @@
   "author": "Jonny Burger <jonny@remotion.dev>",
   "license": "Remotion License (See https://remotion.dev/docs/webcodecs#license)",
   "dependencies": {
-    "@remotion/media-parser": "4.0.248",
-    "@remotion/licensing": "4.0.248"
+    "@remotion/media-parser": "4.0.250",
+    "@remotion/licensing": "4.0.250"
   },
   "peerDependencies": {},
   "devDependencies": {
     "@types/dom-webcodecs": "0.1.11",
     "eslint": "9.14.0",
-    "@remotion/example-videos": "4.0.248",
-    "@remotion/eslint-config-internal": "4.0.248"
+    "@remotion/example-videos": "4.0.250",
+    "@remotion/eslint-config-internal": "4.0.250"
   },
   "keywords": [],
   "publishConfig": {
@@ -50,6 +50,12 @@
       "module": "./dist/esm/buffer.mjs",
       "import": "./dist/esm/buffer.mjs"
     },
+    "./node": {
+      "types": "./dist/writers/node.d.ts",
+      "require": "./dist/writers/node.js",
+      "module": "./dist/esm/node.mjs",
+      "import": "./dist/esm/node.mjs"
+    },
     "./package.json": "./package.json"
   },
   "typesVersions": {
@@ -57,6 +63,9 @@
       "web-fs": [
        "dist/writers/web-fs.d.ts"
      ],
+     "node": [
+       "dist/writers/node.d.ts"
+     ],
      "buffer": [
        "dist/writers/buffer.d.ts"
      ]
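With the "./node" export and typesVersions entry added above, the Node.js writer becomes importable from a package subpath. A short sketch mirroring the remux-serverside test earlier in this diff (the input and output file names are placeholders):

import {convertMedia} from '@remotion/webcodecs';
import {nodeWriter} from '@remotion/webcodecs/node';
import {nodeReader} from '@remotion/media-parser/node';

// Remux a local file to MP4 on the server, writing directly to disk.
const {save} = await convertMedia({
  src: 'input.mov', // placeholder path
  reader: nodeReader,
  container: 'mp4',
  writer: nodeWriter('output.mp4'),
});
const blob = await save(); // the finished file, also readable from output.mp4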