@remotion/webcodecs 4.0.249 → 4.0.250

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,6 +11,11 @@ const autoSelectWriter = async (writer, logLevel) => {
  return writer;
  }
  log_1.Log.verbose(logLevel, 'Determining best writer');
+ const hasNavigator = typeof navigator !== 'undefined';
+ if (!hasNavigator) {
+ log_1.Log.verbose(logLevel, 'No navigator API detected, using buffer writer');
+ return buffer_1.bufferWriter;
+ }
  // Check if we're offline using the navigator API
  const isOffline = !navigator.onLine;
  if (isOffline) {
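
This hunk makes the automatic writer selection safe in runtimes without a `navigator` global (for example Node.js): instead of throwing on `navigator.onLine`, it falls back to the buffer writer. A minimal sketch of what that enables, modeled on the server-side test added later in this diff; the input path is a placeholder and this is not official usage documentation:

// Sketch: convertMedia() running server-side, where `navigator` is undefined.
// With this release, autoSelectWriter() falls back to the in-memory buffer
// writer instead of crashing when no explicit writer is passed.
import {convertMedia} from '@remotion/webcodecs';
import {nodeReader} from '@remotion/media-parser/node';

await convertMedia({
  src: 'input.mov', // placeholder local file, read via nodeReader
  reader: nodeReader,
  container: 'mp4',
});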
@@ -1,6 +1,9 @@
  import type { VideoTrack } from '@remotion/media-parser';
  import type { ConvertMediaVideoCodec } from './get-available-video-codecs';
- export declare const canReencodeVideoTrack: ({ videoCodec, track, }: {
+ import type { ResizeOperation } from './resizing/mode';
+ export declare const canReencodeVideoTrack: ({ videoCodec, track, resizeOperation, rotate, }: {
  videoCodec: ConvertMediaVideoCodec;
  track: VideoTrack;
+ resizeOperation: ResizeOperation | null;
+ rotate: number | null;
  }) => Promise<boolean>;
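
The declaration now requires `resizeOperation` and `rotate`, so codec support is evaluated against the dimensions the output will actually have. A minimal sketch of a call with the new fields, assuming `canReencodeVideoTrack` is re-exported from the package root and that `'h264'` is a valid `ConvertMediaVideoCodec`; the `track` value would normally come from an `onVideoTrack` callback:

import type {VideoTrack} from '@remotion/media-parser';
import {canReencodeVideoTrack} from '@remotion/webcodecs';

// Declared only so the sketch type-checks in isolation; in real code the
// track is provided by @remotion/media-parser via an onVideoTrack callback.
declare const track: VideoTrack;

const canReencode = await canReencodeVideoTrack({
  videoCodec: 'h264',
  track,
  resizeOperation: null, // new required field: null means no resizing
  rotate: null,          // new required field: rotation in degrees, or null
});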
@@ -1,13 +1,21 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.canReencodeVideoTrack = void 0;
+ const rotation_1 = require("./rotation");
  const video_decoder_config_1 = require("./video-decoder-config");
  const video_encoder_config_1 = require("./video-encoder-config");
- const canReencodeVideoTrack = async ({ videoCodec, track, }) => {
- const videoEncoderConfig = await (0, video_encoder_config_1.getVideoEncoderConfig)({
- codec: videoCodec,
+ const canReencodeVideoTrack = async ({ videoCodec, track, resizeOperation, rotate, }) => {
+ const { height, width } = (0, rotation_1.calculateNewDimensionsFromRotateAndScale)({
  height: track.displayAspectHeight,
+ resizeOperation,
+ rotation: rotate !== null && rotate !== void 0 ? rotate : 0,
+ videoCodec,
  width: track.displayAspectWidth,
+ });
+ const videoEncoderConfig = await (0, video_encoder_config_1.getVideoEncoderConfig)({
+ codec: videoCodec,
+ height,
+ width,
  fps: track.fps,
  });
  const videoDecoderConfig = await (0, video_decoder_config_1.getVideoDecoderConfigWithHardwareAcceleration)(track);
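
The implementation now computes the post-rotate, post-resize dimensions before asking `getVideoEncoderConfig` whether the encoder can handle them. `calculateNewDimensionsFromRotateAndScale` is internal and its exact rules are not part of this diff; the snippet below is only a hypothetical illustration of the dimension swap that a 90° or 270° rotation implies, not the library's implementation (resizing is omitted):

// Hypothetical helper, for illustration only.
const swapForRotation = ({
  width,
  height,
  rotation,
}: {
  width: number;
  height: number;
  rotation: number;
}) => {
  const normalized = ((rotation % 360) + 360) % 360;
  return normalized === 90 || normalized === 270
    ? {width: height, height: width}
    : {width, height};
};

// A 1920x1080 track rotated by 90 degrees would be checked as 1080x1920.
console.log(swapForRotation({width: 1920, height: 1080, rotation: 90}));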
@@ -15,6 +15,8 @@ const defaultOnVideoTrackHandler = async ({ track, defaultVideoCodec, logLevel,
  const canReencode = await (0, can_reencode_video_track_1.canReencodeVideoTrack)({
  videoCodec: defaultVideoCodec,
  track,
+ resizeOperation,
+ rotate,
  });
  if (canReencode) {
  media_parser_1.MediaParserInternals.Log.verbose(logLevel, `Track ${track.trackId} (video): Cannot copy, but re-enconde, therefore re-encoding`);
@@ -1082,12 +1082,21 @@ var getVideoEncoderConfig = async ({
  // src/can-reencode-video-track.ts
  var canReencodeVideoTrack = async ({
  videoCodec,
- track
+ track,
+ resizeOperation,
+ rotate
  }) => {
+ const { height, width } = calculateNewDimensionsFromRotateAndScale({
+ height: track.displayAspectHeight,
+ resizeOperation,
+ rotation: rotate ?? 0,
+ videoCodec,
+ width: track.displayAspectWidth
+ });
  const videoEncoderConfig = await getVideoEncoderConfig({
  codec: videoCodec,
- height: track.displayAspectHeight,
- width: track.displayAspectWidth,
+ height,
+ width,
  fps: track.fps
  });
  const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
@@ -1103,6 +1112,11 @@ var autoSelectWriter = async (writer, logLevel) => {
  return writer;
  }
  Log.verbose(logLevel, "Determining best writer");
+ const hasNavigator = typeof navigator !== "undefined";
+ if (!hasNavigator) {
+ Log.verbose(logLevel, "No navigator API detected, using buffer writer");
+ return bufferWriter;
+ }
  const isOffline = !navigator.onLine;
  if (isOffline) {
  Log.verbose(logLevel, "Offline mode detected, using buffer writer");
@@ -1474,7 +1488,9 @@ var defaultOnVideoTrackHandler = async ({
  }
  const canReencode = await canReencodeVideoTrack({
  videoCodec: defaultVideoCodec,
- track
+ track,
+ resizeOperation,
+ rotate
  });
  if (canReencode) {
  MediaParserInternals4.Log.verbose(logLevel, `Track ${track.trackId} (video): Cannot copy, but re-enconde, therefore re-encoding`);
@@ -0,0 +1,92 @@
+ var __create = Object.create;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __toESM = (mod, isNodeMode, target) => {
+ target = mod != null ? __create(__getProtoOf(mod)) : {};
+ const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
+ for (let key of __getOwnPropNames(mod))
+ if (!__hasOwnProp.call(to, key))
+ __defProp(to, key, {
+ get: () => mod[key],
+ enumerable: true
+ });
+ return to;
+ };
+ var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
+
+ // src/writers/node-writer.ts
+ var { default: fs} = (() => ({}));
+ var createContent = (filename) => {
+ return async () => {
+ const writPromise = Promise.resolve();
+ const remove = async () => {
+ await fs.promises.unlink(filename).catch(() => {
+ });
+ };
+ await remove();
+ const writeStream = await new Promise((resolve, reject) => {
+ fs.open(filename, "w", (err, fd) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(fd);
+ });
+ });
+ let written = 0;
+ const write = async (arr) => {
+ await new Promise((resolve, reject) => {
+ fs.write(writeStream, arr, (err) => {
+ if (err) {
+ reject(err);
+ }
+ resolve();
+ });
+ });
+ written += arr.byteLength;
+ };
+ const updateDataAt = async (position, data) => {
+ return new Promise((resolve, reject) => {
+ fs.write(writeStream, data, 0, data.length, position, (err) => {
+ if (err) {
+ reject(err);
+ }
+ resolve();
+ });
+ });
+ };
+ const writer = {
+ write: (arr) => {
+ writPromise.then(() => write(arr));
+ return writPromise;
+ },
+ updateDataAt: (position, data) => {
+ writPromise.then(() => updateDataAt(position, data));
+ return writPromise;
+ },
+ waitForFinish: async () => {
+ await writPromise;
+ },
+ getWrittenByteCount: () => written,
+ remove,
+ save: async () => {
+ try {
+ fs.closeSync(writeStream);
+ const file = await fs.promises.readFile(filename);
+ return new Blob([file]);
+ } catch (e) {
+ return Promise.reject(e);
+ }
+ }
+ };
+ return writer;
+ };
+ };
+ var nodeWriter = (path) => {
+ return { createContent: createContent(path) };
+ };
+ export {
+ nodeWriter
+ };
@@ -0,0 +1,71 @@
+ // src/writers/node.ts
+ import fs from "node:fs";
+ var createContent = (filename) => {
+ return async () => {
+ const writPromise = Promise.resolve();
+ const remove = async () => {
+ await fs.promises.unlink(filename).catch(() => {
+ });
+ };
+ await remove();
+ if (!fs.existsSync(filename)) {
+ fs.writeFileSync(filename, "");
+ }
+ const writeStream = fs.openSync(filename, "w");
+ let written = 0;
+ const write = async (data) => {
+ await new Promise((resolve, reject) => {
+ fs.write(writeStream, data, 0, data.length, undefined, (err) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ written += data.byteLength;
+ };
+ const updateDataAt = (position, data) => {
+ return new Promise((resolve, reject) => {
+ fs.write(writeStream, data, 0, data.length, position, (err) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ };
+ const writer = {
+ write: (arr) => {
+ writPromise.then(() => write(arr));
+ return writPromise;
+ },
+ updateDataAt: (position, data) => {
+ writPromise.then(() => updateDataAt(position, data));
+ return writPromise;
+ },
+ waitForFinish: async () => {
+ await writPromise;
+ },
+ getWrittenByteCount: () => written,
+ remove,
+ save: async () => {
+ try {
+ fs.closeSync(writeStream);
+ const file = await fs.promises.readFile(filename);
+ return new Blob([file]);
+ } catch (e) {
+ return Promise.reject(e);
+ }
+ }
+ };
+ return writer;
+ };
+ };
+ var nodeWriter = (path) => {
+ return { createContent: createContent(path) };
+ };
+ export {
+ nodeWriter
+ };
@@ -13,6 +13,7 @@ const state = media_parser_1.MediaParserInternals.makeParserState({
  onVideoTrack: null,
  supportsContentRange: true,
  contentLength: null,
+ logLevel: 'info',
  });
  (0, bun_test_1.test)('Should make Matroska header that is same as input', async () => {
  const headerOutput = (0, matroska_utils_1.makeMatroskaBytes)({
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,12 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const node_1 = require("@remotion/media-parser/node");
+ const bun_test_1 = require("bun:test");
+ const convert_media_1 = require("../convert-media");
+ (0, bun_test_1.test)('should be able to remux on server side', async () => {
+ await (0, convert_media_1.convertMedia)({
+ src: '/Users/jonathanburger/Downloads/odaje_glitch.mov',
+ reader: node_1.nodeReader,
+ container: 'mp4',
+ });
+ });
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,19 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const example_videos_1 = require("@remotion/example-videos");
+ const node_1 = require("@remotion/media-parser/node");
+ const bun_test_1 = require("bun:test");
+ const node_fs_1 = require("node:fs");
+ const convert_media_1 = require("../convert-media");
+ const node_2 = require("../writers/node");
+ (0, bun_test_1.test)('should be able to remux server side', async () => {
+ const { save } = await (0, convert_media_1.convertMedia)({
+ src: example_videos_1.exampleVideos.bigBuckBunny,
+ reader: node_1.nodeReader,
+ container: 'mp4',
+ writer: (0, node_2.nodeWriter)('outputbun.mp4'),
+ });
+ const data = await save();
+ (0, bun_test_1.expect)(data.size).toBe(15306323);
+ (0, node_fs_1.unlinkSync)('outputbun.mp4');
+ });
@@ -48,9 +48,8 @@ const bun_test_1 = require("bun:test");
  fields: {},
  supportsContentRange: true,
  contentLength: null,
+ logLevel: 'info',
  }),
- signal: null,
- fields: {},
  });
  (0, bun_test_1.expect)(parsed).toEqual({
  offset: 0,
@@ -216,10 +215,8 @@ const bun_test_1 = require("bun:test");
  },
  supportsContentRange: true,
  contentLength: null,
+ logLevel: 'info',
  }),
- signal: null,
- logLevel: 'info',
- fields: {},
  });
  (0, bun_test_1.expect)(parsed.sample).toEqual({
  size: 158,
@@ -0,0 +1,4 @@
+ import type { CreateContent } from './writer';
+ export declare const fsWriter: (path: string) => {
+ createContent: CreateContent;
+ };
@@ -0,0 +1,78 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.fsWriter = void 0;
+ const fs_1 = __importDefault(require("fs"));
+ const createContent = (filename) => {
+ return async () => {
+ let writPromise = Promise.resolve();
+ const remove = () => {
+ try {
+ fs_1.default.unlinkSync(filename);
+ }
+ catch (_a) { }
+ return Promise.resolve();
+ };
+ await remove();
+ if (!fs_1.default.existsSync(filename)) {
+ fs_1.default.writeFileSync(filename, '');
+ }
+ const writeStream = fs_1.default.openSync(filename, 'w');
+ let written = 0;
+ const write = async (data) => {
+ await new Promise((resolve, reject) => {
+ fs_1.default.write(writeStream, data, 0, data.byteLength, written, (err) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ written += data.byteLength;
+ };
+ const updateDataAt = (position, data) => {
+ return new Promise((resolve, reject) => {
+ fs_1.default.write(writeStream, data, 0, data.byteLength, position, (err) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ };
+ const writer = {
+ write: (arr) => {
+ writPromise = writPromise.then(() => write(arr));
+ return writPromise;
+ },
+ updateDataAt: (position, data) => {
+ writPromise = writPromise.then(() => updateDataAt(position, data));
+ return writPromise;
+ },
+ waitForFinish: async () => {
+ await writPromise;
+ },
+ getWrittenByteCount: () => written,
+ remove,
+ save: async () => {
+ try {
+ fs_1.default.closeSync(writeStream);
+ const file = await fs_1.default.promises.readFile(filename);
+ return new Blob([file]);
+ }
+ catch (e) {
+ return Promise.reject(e);
+ }
+ },
+ };
+ return writer;
+ };
+ };
+ const fsWriter = (path) => {
+ return { createContent: createContent(path) };
+ };
+ exports.fsWriter = fsWriter;
@@ -0,0 +1,4 @@
+ import type { CreateContent } from './writer';
+ export declare const nodeWriter: (path: string) => {
+ createContent: CreateContent;
+ };
@@ -0,0 +1,77 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.nodeWriter = void 0;
+ const node_fs_1 = __importDefault(require("node:fs"));
+ const createContent = (filename) => {
+ return async () => {
+ const writPromise = Promise.resolve();
+ const remove = async () => {
+ await node_fs_1.default.promises.unlink(filename).catch(() => { });
+ };
+ await remove();
+ const writeStream = await new Promise((resolve, reject) => {
+ node_fs_1.default.open(filename, 'w', (err, fd) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(fd);
+ });
+ });
+ let written = 0;
+ const write = async (arr) => {
+ await new Promise((resolve, reject) => {
+ node_fs_1.default.write(writeStream, arr, (err) => {
+ if (err) {
+ reject(err);
+ }
+ resolve();
+ });
+ });
+ written += arr.byteLength;
+ };
+ const updateDataAt = async (position, data) => {
+ return new Promise((resolve, reject) => {
+ node_fs_1.default.write(writeStream, data, 0, data.length, position, (err) => {
+ if (err) {
+ reject(err);
+ }
+ resolve();
+ });
+ });
+ };
+ const writer = {
+ write: (arr) => {
+ writPromise.then(() => write(arr));
+ return writPromise;
+ },
+ updateDataAt: (position, data) => {
+ writPromise.then(() => updateDataAt(position, data));
+ return writPromise;
+ },
+ waitForFinish: async () => {
+ await writPromise;
+ },
+ getWrittenByteCount: () => written,
+ remove,
+ save: async () => {
+ try {
+ node_fs_1.default.closeSync(writeStream);
+ const file = await node_fs_1.default.promises.readFile(filename);
+ return new Blob([file]);
+ }
+ catch (e) {
+ return Promise.reject(e);
+ }
+ },
+ };
+ return writer;
+ };
+ };
+ const nodeWriter = (path) => {
+ return { createContent: createContent(path) };
+ };
+ exports.nodeWriter = nodeWriter;
@@ -0,0 +1,4 @@
+ import type { CreateContent } from './writer';
+ export declare const nodeWriter: (path: string) => {
+ createContent: CreateContent;
+ };
@@ -0,0 +1,74 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.nodeWriter = void 0;
+ const node_fs_1 = __importDefault(require("node:fs"));
+ const createContent = (filename) => {
+ return async () => {
+ const writPromise = Promise.resolve();
+ const remove = async () => {
+ await node_fs_1.default.promises.unlink(filename).catch(() => { });
+ };
+ await remove();
+ if (!node_fs_1.default.existsSync(filename)) {
+ node_fs_1.default.writeFileSync(filename, '');
+ }
+ const writeStream = node_fs_1.default.openSync(filename, 'w');
+ let written = 0;
+ const write = async (data) => {
+ await new Promise((resolve, reject) => {
+ node_fs_1.default.write(writeStream, data, 0, data.length, undefined, (err) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ written += data.byteLength;
+ };
+ const updateDataAt = (position, data) => {
+ return new Promise((resolve, reject) => {
+ node_fs_1.default.write(writeStream, data, 0, data.length, position, (err) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ };
+ const writer = {
+ write: (arr) => {
+ writPromise.then(() => write(arr));
+ return writPromise;
+ },
+ updateDataAt: (position, data) => {
+ writPromise.then(() => updateDataAt(position, data));
+ return writPromise;
+ },
+ waitForFinish: async () => {
+ await writPromise;
+ },
+ getWrittenByteCount: () => written,
+ remove,
+ save: async () => {
+ try {
+ node_fs_1.default.closeSync(writeStream);
+ const file = await node_fs_1.default.promises.readFile(filename);
+ return new Blob([file]);
+ }
+ catch (e) {
+ return Promise.reject(e);
+ }
+ },
+ };
+ return writer;
+ };
+ };
+ const nodeWriter = (path) => {
+ return { createContent: createContent(path) };
+ };
+ exports.nodeWriter = nodeWriter;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/webcodecs",
- "version": "4.0.249",
+ "version": "4.0.250",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -17,15 +17,15 @@
  "author": "Jonny Burger <jonny@remotion.dev>",
  "license": "Remotion License (See https://remotion.dev/docs/webcodecs#license)",
  "dependencies": {
- "@remotion/media-parser": "4.0.249",
- "@remotion/licensing": "4.0.249"
+ "@remotion/media-parser": "4.0.250",
+ "@remotion/licensing": "4.0.250"
  },
  "peerDependencies": {},
  "devDependencies": {
  "@types/dom-webcodecs": "0.1.11",
  "eslint": "9.14.0",
- "@remotion/eslint-config-internal": "4.0.249",
- "@remotion/example-videos": "4.0.249"
+ "@remotion/example-videos": "4.0.250",
+ "@remotion/eslint-config-internal": "4.0.250"
  },
  "keywords": [],
  "publishConfig": {
@@ -50,6 +50,12 @@
  "module": "./dist/esm/buffer.mjs",
  "import": "./dist/esm/buffer.mjs"
  },
+ "./node": {
+ "types": "./dist/writers/node.d.ts",
+ "require": "./dist/writers/node.js",
+ "module": "./dist/esm/node.mjs",
+ "import": "./dist/esm/node.mjs"
+ },
  "./package.json": "./package.json"
  },
  "typesVersions": {
@@ -57,6 +63,9 @@
  "web-fs": [
  "dist/writers/web-fs.d.ts"
  ],
+ "node": [
+ "dist/writers/node.d.ts"
+ ],
  "buffer": [
  "dist/writers/buffer.d.ts"
  ]
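
Together with the new dist/writers/node.* files above, the added "./node" entry in "exports" and "typesVersions" makes the file-system writer importable as @remotion/webcodecs/node. A sketch of consuming it, mirroring the server-side test added earlier in this diff; the paths are placeholders and the exact public API surface should be confirmed against the Remotion docs:

import {convertMedia} from '@remotion/webcodecs';
import {nodeWriter} from '@remotion/webcodecs/node';
import {nodeReader} from '@remotion/media-parser/node';

// Convert a local file and write the result to disk via nodeWriter().
const {save} = await convertMedia({
  src: 'input.mp4', // placeholder; the test uses exampleVideos.bigBuckBunny
  reader: nodeReader,
  container: 'mp4',
  writer: nodeWriter('output.mp4'),
});

// save() returns a Blob of the bytes that were written to output.mp4.
const blob = await save();
console.log(blob.size);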