@remotion/lambda 4.0.200 → 4.0.201
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/api/deploy-site.d.ts +2 -0
- package/dist/api/deploy-site.js +10 -2
- package/dist/cli/commands/sites/create.js +19 -1
- package/dist/cli/helpers/progress-bar.d.ts +5 -0
- package/dist/cli/helpers/progress-bar.js +19 -1
- package/dist/functions/helpers/check-if-render-exists.d.ts +3 -0
- package/dist/functions/helpers/check-if-render-exists.js +15 -0
- package/dist/functions/helpers/delete-chunks.d.ts +9 -0
- package/dist/functions/helpers/delete-chunks.js +25 -0
- package/dist/functions/helpers/expected-out-name.d.ts +1 -1
- package/dist/functions/helpers/get-browser-instance.d.ts +2 -3
- package/dist/functions/helpers/get-browser-instance.js +4 -3
- package/dist/functions/helpers/get-cleanup-progress.d.ts +10 -0
- package/dist/functions/helpers/get-cleanup-progress.js +35 -0
- package/dist/functions/helpers/get-custom-out-name.d.ts +1 -1
- package/dist/functions/helpers/get-encoding-metadata.d.ts +7 -0
- package/dist/functions/helpers/get-encoding-metadata.js +15 -0
- package/dist/functions/helpers/get-encoding-progress-step-size.d.ts +1 -0
- package/dist/functions/helpers/get-encoding-progress-step-size.js +7 -0
- package/dist/functions/helpers/get-files-to-delete.d.ts +10 -0
- package/dist/functions/helpers/get-files-to-delete.js +52 -0
- package/dist/functions/helpers/get-final-encoding-status.d.ts +6 -0
- package/dist/functions/helpers/get-final-encoding-status.js +18 -0
- package/dist/functions/helpers/get-folder-size.d.ts +1 -0
- package/dist/functions/helpers/get-folder-size.js +8 -0
- package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts +8 -0
- package/dist/functions/helpers/get-lambdas-invoked-stats.js +14 -0
- package/dist/functions/helpers/get-post-render-data.d.ts +8 -0
- package/dist/functions/helpers/get-post-render-data.js +22 -0
- package/dist/functions/helpers/get-render-metadata.d.ts +8 -0
- package/dist/functions/helpers/get-render-metadata.js +17 -0
- package/dist/functions/helpers/get-rendered-frames-progress.d.ts +8 -0
- package/dist/functions/helpers/get-rendered-frames-progress.js +37 -0
- package/dist/functions/helpers/get-time-to-finish.d.ts +5 -0
- package/dist/functions/helpers/get-time-to-finish.js +13 -0
- package/dist/functions/helpers/io.d.ts +33 -3
- package/dist/functions/helpers/io.js +104 -3
- package/dist/functions/helpers/print-cloudwatch-helper.d.ts +1 -1
- package/dist/functions/helpers/print-cloudwatch-helper.js +3 -3
- package/dist/functions/helpers/streaming-payloads.d.ts +19 -0
- package/dist/functions/helpers/streaming-payloads.js +25 -0
- package/dist/functions/helpers/write-lambda-error.d.ts +3 -2
- package/dist/functions/helpers/write-lambda-error.js +2 -3
- package/dist/functions/helpers/write-post-render-data.d.ts +9 -0
- package/dist/functions/helpers/write-post-render-data.js +18 -0
- package/dist/functions/merge.d.ts +9 -0
- package/dist/functions/merge.js +61 -0
- package/dist/internals.d.ts +1 -0
- package/dist/shared/__mocks__/read-dir.js +2 -2
- package/dist/shared/chunk-progress.d.ts +9 -0
- package/dist/shared/chunk-progress.js +2034 -0
- package/dist/shared/compress-props.d.ts +6 -8
- package/dist/shared/compress-props.js +14 -11
- package/dist/shared/get-etag.d.ts +1 -1
- package/dist/shared/get-etag.js +33 -12
- package/dist/shared/get-s3-operations.d.ts +2 -1
- package/dist/shared/get-s3-operations.js +38 -20
- package/dist/shared/parse-chunk-key.d.ts +5 -0
- package/dist/shared/parse-chunk-key.js +15 -0
- package/dist/shared/parse-lambda-initialized-key.d.ts +5 -0
- package/dist/shared/parse-lambda-initialized-key.js +15 -0
- package/dist/shared/read-dir.d.ts +6 -5
- package/dist/shared/read-dir.js +9 -6
- package/dist/shared/serialize-props.d.ts +14 -0
- package/dist/shared/serialize-props.js +36 -0
- package/package.json +11 -11
- package/remotionlambda-arm64.zip +0 -0
- package/dist/functions/provider-implementation.d.ts +0 -0
- package/dist/functions/provider-implementation.js +0 -1
package/dist/shared/compress-props.d.ts
CHANGED
@@ -1,22 +1,20 @@
-import type {
-import {
+import type { SerializedInputProps } from '@remotion/serverless/client';
+import type { AwsRegion } from '../client';
 type PropsType = 'input-props' | 'resolved-props';
 export declare const serializeOrThrow: (inputProps: Record<string, unknown>, propsType: PropsType) => string;
 export declare const getNeedsToUpload: (type: 'still' | 'video-or-audio', sizes: number[]) => boolean;
-export declare const compressInputProps:
+export declare const compressInputProps: ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, }: {
     stringifiedInputProps: string;
-    region:
+    region: AwsRegion;
     userSpecifiedBucketName: string | null;
     propsType: PropsType;
     needsToUpload: boolean;
-    providerSpecifics: ProviderSpecifics<Region>;
 }) => Promise<SerializedInputProps>;
-export declare const decompressInputProps:
+export declare const decompressInputProps: ({ serialized, region, bucketName, expectedBucketOwner, propsType, }: {
     serialized: SerializedInputProps;
-    region:
+    region: AwsRegion;
     bucketName: string;
     expectedBucketOwner: string;
     propsType: PropsType;
-    providerSpecifics: ProviderSpecifics<Region>;
 }) => Promise<string>;
 export {};
package/dist/shared/compress-props.js
CHANGED
@@ -1,14 +1,18 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.decompressInputProps = exports.compressInputProps = exports.getNeedsToUpload = exports.serializeOrThrow = void 0;
-const client_1 = require("@remotion/serverless/client");
 const no_react_1 = require("remotion/no-react");
+const get_or_create_bucket_1 = require("../api/get-or-create-bucket");
+const io_1 = require("../functions/helpers/io");
+const constants_1 = require("./constants");
+const random_hash_1 = require("./random-hash");
+const stream_to_string_1 = require("./stream-to-string");
 const validate_webhook_1 = require("./validate-webhook");
 const makeKey = (type, hash) => {
     if (type === 'input-props') {
-        return (0,
+        return (0, constants_1.inputPropsKey)(hash);
     }
-    return (0,
+    return (0, constants_1.resolvedPropsKey)(hash);
 };
 const serializeOrThrow = (inputProps, propsType) => {
     try {
@@ -35,16 +39,15 @@ const getNeedsToUpload = (type, sizes) => {
     return false;
 };
 exports.getNeedsToUpload = getNeedsToUpload;
-const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload,
-    const hash =
+const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, }) => {
+    const hash = (0, random_hash_1.randomHash)();
     if (needsToUpload) {
-        const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0,
+        const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, get_or_create_bucket_1.internalGetOrCreateBucket)({
             region,
             enableFolderExpiry: null,
             customCredentials: null,
-            providerSpecifics,
         })).bucketName;
-        await
+        await (0, io_1.lambdaWriteFile)({
             body: stringifiedInputProps,
             bucketName,
             region,
@@ -66,18 +69,18 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
     };
 };
 exports.compressInputProps = compressInputProps;
-const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType,
+const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, }) => {
    if (serialized.type === 'payload') {
         return serialized.payload;
     }
     try {
-        const response = await
+        const response = await (0, io_1.lambdaReadFile)({
             bucketName,
             expectedBucketOwner,
             key: makeKey(propsType, serialized.hash),
             region,
         });
-        const body = await (0,
+        const body = await (0, stream_to_string_1.streamToString)(response);
         const payload = body;
         return payload;
     }
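Note: compressInputProps and decompressInputProps no longer take a providerSpecifics argument; the compiled code now resolves the bucket (internalGetOrCreateBucket) and S3 I/O (lambdaWriteFile / lambdaReadFile) internally. Below is a minimal caller sketch against the signatures in compress-props.d.ts above; the internal dist import path, the byte-length sizing passed to getNeedsToUpload, and the 'us-east-1' region are illustrative assumptions, not taken from this diff.

import {
  compressInputProps,
  getNeedsToUpload,
  serializeOrThrow,
} from '@remotion/lambda/dist/shared/compress-props';

// Serialize the props, then decide whether they are small enough to pass
// inline or must be uploaded to the bucket first.
const prepareProps = async (inputProps: Record<string, unknown>) => {
  const stringified = serializeOrThrow(inputProps, 'input-props');
  const needsToUpload = getNeedsToUpload('video-or-audio', [
    new TextEncoder().encode(stringified).byteLength,
  ]);
  // No providerSpecifics anymore - the bucket lookup happens inside.
  return compressInputProps({
    stringifiedInputProps: stringified,
    region: 'us-east-1',
    userSpecifiedBucketName: null,
    propsType: 'input-props',
    needsToUpload,
  }); // resolves to SerializedInputProps: an inline payload or a hash reference
};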
package/dist/shared/get-etag.d.ts
CHANGED
@@ -1 +1 @@
-export declare const getEtagOfFile: (filePath: string) => Promise<string>;
+export declare const getEtagOfFile: (filePath: string, onProgress: (bytes: number) => void) => () => Promise<string>;
package/dist/shared/get-etag.js
CHANGED
@@ -8,17 +8,38 @@ const node_crypto_1 = __importDefault(require("node:crypto"));
 const node_fs_1 = __importDefault(require("node:fs"));
 const chunk = 1024 * 1024 * 5; // 5MB
 const md5 = (data) => node_crypto_1.default.createHash('md5').update(data).digest('hex');
-const getEtagOfFile =
-const
-
-
-
-
-
-
-
-
-
-
+const getEtagOfFile = (filePath, onProgress) => {
+    const calc = async () => {
+        const size = await node_fs_1.default.promises.stat(filePath).then((s) => s.size);
+        if (size <= chunk) {
+            const buffer = await node_fs_1.default.promises.readFile(filePath);
+            return `"${md5(buffer)}"`;
+        }
+        const stream = node_fs_1.default.createReadStream(filePath, {
+            highWaterMark: chunk,
+        });
+        const md5Chunks = [];
+        const chunksNumber = Math.ceil(size / chunk);
+        return new Promise((resolve, reject) => {
+            stream.on('data', (c) => {
+                md5Chunks.push(md5(c));
+                onProgress(c.length);
+            });
+            stream.on('end', () => {
+                resolve(`"${md5(Buffer.from(md5Chunks.join(''), 'hex'))}-${chunksNumber}"`);
+            });
+            stream.on('error', (err) => {
+                reject(err);
+            });
+        });
+    };
+    let tag = null;
+    return async () => {
+        if (tag !== null) {
+            return tag;
+        }
+        tag = await calc();
+        return tag;
+    };
 };
 exports.getEtagOfFile = getEtagOfFile;
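Note: getEtagOfFile now returns a lazy, memoized thunk instead of a Promise. No disk I/O happens until the thunk is first awaited, the result is cached for repeat calls, and files larger than 5 MB are hashed chunk by chunk to produce an S3-style multipart ETag (MD5 of the concatenated per-chunk digests, suffixed with the chunk count). A usage sketch; the internal dist import path, the file path, and the logging are illustrative assumptions.

import {getEtagOfFile} from '@remotion/lambda/dist/shared/get-etag';

// Creating the thunk does not read the file yet.
const etagOfBundleFile = getEtagOfFile('/tmp/bundle/bundle.js', (bytes) => {
  // Reported once per hashed chunk (up to 5 MB each).
  console.log(`hashed ${bytes} bytes`);
});

const compareWithS3 = async (s3Etag: string) => {
  const local = await etagOfBundleFile(); // hashes the file on first call
  const again = await etagOfBundleFile(); // memoized, no re-read
  // For a 12 MB file the value looks like '"<md5-of-chunk-md5s>-3"',
  // the same format S3 assigns to a multipart upload with 5 MB parts.
  return local === s3Etag && local === again;
};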
package/dist/shared/get-s3-operations.d.ts
CHANGED
@@ -1,8 +1,9 @@
 import type { _Object } from '@aws-sdk/client-s3';
-export declare const getS3DiffOperations: ({ objects, bundle, prefix, }: {
+export declare const getS3DiffOperations: ({ objects, bundle, prefix, onProgress, }: {
     objects: _Object[];
     bundle: string;
     prefix: string;
+    onProgress: (bytes: number) => void;
 }) => Promise<{
     toDelete: _Object[];
     toUpload: string[];
package/dist/shared/get-s3-operations.js
CHANGED
@@ -2,31 +2,49 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getS3DiffOperations = void 0;
 const read_dir_1 = require("./read-dir");
-const getS3DiffOperations = async ({ objects, bundle, prefix, }) => {
-
+const getS3DiffOperations = async ({ objects, bundle, prefix, onProgress, }) => {
+    var _a, _b, _c;
+    let totalBytes = 0;
+    const dir = (0, read_dir_1.readDirectory)({
         dir: bundle,
         etags: {},
         originalDir: bundle,
+        onProgress: (bytes) => {
+            totalBytes += bytes;
+            onProgress(totalBytes);
+        },
     });
-    const filesOnS3ButNotLocal =
-
+    const filesOnS3ButNotLocal = [];
+    for (const fileOnS3 of objects) {
         const key = (_a = fileOnS3.Key) === null || _a === void 0 ? void 0 : _a.substring(prefix.length + 1);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if (!dir[key]) {
+            filesOnS3ButNotLocal.push(fileOnS3);
+        }
+    }
+    const localFilesNotOnS3 = [];
+    for (const d of Object.keys(dir)) {
+        let found;
+        for (const o of objects) {
+            const key = (_b = o.Key) === null || _b === void 0 ? void 0 : _b.substring(prefix.length + 1);
+            if (key === d && o.ETag === (await dir[d]())) {
+                found = o;
+                break;
+            }
+        }
+        if (!found) {
+            localFilesNotOnS3.push(d);
+        }
+    }
+    const existing = [];
+    for (const d of Object.keys(dir)) {
+        for (const o of objects) {
+            const key = (_c = o.Key) === null || _c === void 0 ? void 0 : _c.substring(prefix.length + 1);
+            if (key === d && o.ETag === (await dir[d]())) {
+                existing.push(d);
+                break;
+            }
+        }
+    }
     return {
         toDelete: filesOnS3ButNotLocal,
         toUpload: localFilesNotOnS3,
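Note: getS3DiffOperations now accepts an onProgress callback, threads it through readDirectory so local hashing progress can be surfaced (e.g. by the CLI progress bar), and compares S3 ETags against the lazily computed local ETags via `await dir[d]()`. A sketch of how a deploy step might drive it; the S3 client setup, the single-page ListObjectsV2 call, and the internal dist import path are illustrative assumptions.

import {ListObjectsV2Command, S3Client} from '@aws-sdk/client-s3';
import {getS3DiffOperations} from '@remotion/lambda/dist/shared/get-s3-operations';

const client = new S3Client({region: 'us-east-1'});

const diffSite = async (bundle: string, bucket: string, prefix: string) => {
  // A real deploy would paginate with ContinuationToken; one page for brevity.
  const listed = await client.send(
    new ListObjectsV2Command({Bucket: bucket, Prefix: prefix}),
  );
  const {toDelete, toUpload} = await getS3DiffOperations({
    objects: listed.Contents ?? [],
    bundle,
    prefix,
    onProgress: (bytes) => {
      // Cumulative bytes of the local bundle hashed so far.
      console.log(`hashed ${bytes} bytes of the local bundle`);
    },
  });
  return {toDelete, toUpload};
};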
package/dist/shared/parse-chunk-key.js
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseLambdaChunkKey = void 0;
+const parseLambdaChunkKey = (key) => {
+    const match = key.match(/^renders\/(.*)\/chunks\/chunk:([0-9]+):(video|audio)$/);
+    if (!match) {
+        throw new Error(`Cannot parse filename ${key} into timing information. Malformed data.`);
+    }
+    return {
+        renderId: match[1],
+        chunk: Number(match[2]),
+        type: match[3],
+    };
+};
+exports.parseLambdaChunkKey = parseLambdaChunkKey;
package/dist/shared/parse-lambda-initialized-key.js
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseLambdaInitializedKey = void 0;
+const parseLambdaInitializedKey = (key) => {
+    const match = key.match(/^renders\/(.*)\/lambda-initialized-chunk:([0-9]+)-attempt:([0-9]+).txt$/);
+    if (!match) {
+        throw new Error(`Cannot parse filename ${key} into timing information. Malformed data.`);
+    }
+    return {
+        renderId: match[1],
+        chunk: Number(match[2]),
+        attempt: Number(match[3]),
+    };
+};
+exports.parseLambdaInitializedKey = parseLambdaInitializedKey;
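Note: both new helpers recover render metadata from S3 key names with a regular expression and throw on malformed keys. A quick illustration with keys shaped like the regexes above; the render ID and numbers are made up, and the internal dist import paths are for illustration only.

import {parseLambdaChunkKey} from '@remotion/lambda/dist/shared/parse-chunk-key';
import {parseLambdaInitializedKey} from '@remotion/lambda/dist/shared/parse-lambda-initialized-key';

// renders/<renderId>/chunks/chunk:<index>:<video|audio>
const chunk = parseLambdaChunkKey('renders/abcdef12/chunks/chunk:12:video');
// -> {renderId: 'abcdef12', chunk: 12, type: 'video'}

// renders/<renderId>/lambda-initialized-chunk:<index>-attempt:<attempt>.txt
const initialized = parseLambdaInitializedKey(
  'renders/abcdef12/lambda-initialized-chunk:12-attempt:1.txt',
);
// -> {renderId: 'abcdef12', chunk: 12, attempt: 1}

// Anything else throws:
// parseLambdaChunkKey('renders/abcdef12/chunks/chunk:12');
// -> Error: Cannot parse filename ... into timing information. Malformed data.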
package/dist/shared/read-dir.d.ts
CHANGED
@@ -1,9 +1,10 @@
-export declare function readDirectory({ dir, etags, originalDir, }: {
+export declare function readDirectory({ dir, etags, originalDir, onProgress, }: {
     dir: string;
     etags: {
-        [key: string]: string
+        [key: string]: () => Promise<string>;
     };
     originalDir: string;
-
-
-
+    onProgress: (bytes: number) => void;
+}): {
+    [key: string]: () => Promise<string>;
+};
package/dist/shared/read-dir.js
CHANGED
@@ -29,7 +29,7 @@ const path = __importStar(require("node:path"));
 const get_etag_1 = require("./get-etag");
 // Function to recursively read a directory and return a list of files
 // with their etags and file names
-
+function readDirectory({ dir, etags, originalDir, onProgress, }) {
     const files = fs.readdirSync(dir);
     for (const file of files) {
         if (file.startsWith('.DS_Store'))
@@ -38,19 +38,22 @@ async function readDirectory({ dir, etags, originalDir, }) {
         if (fs.lstatSync(filePath).isDirectory()) {
             etags = {
                 ...etags,
-                ...
+                ...readDirectory({
+                    dir: filePath,
+                    etags,
+                    originalDir,
+                    onProgress,
+                }),
             };
             continue;
         }
         // eslint-disable-next-line no-lonely-if
         if (fs.lstatSync(filePath).isSymbolicLink()) {
             const realPath = fs.realpathSync(filePath);
-            etags[path.relative(originalDir, filePath)] =
-                await (0, get_etag_1.getEtagOfFile)(realPath);
+            etags[path.relative(originalDir, filePath)] = (0, get_etag_1.getEtagOfFile)(realPath, onProgress);
         }
         else {
-            etags[path.relative(originalDir, filePath)] =
-                await (0, get_etag_1.getEtagOfFile)(filePath);
+            etags[path.relative(originalDir, filePath)] = (0, get_etag_1.getEtagOfFile)(filePath, onProgress);
         }
     }
     // Return the list of files with their etags and file names
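Note: readDirectory is now synchronous and maps each relative file path to a lazy ETag thunk (the getEtagOfFile change above), so hashing only happens for entries that are actually compared. A minimal sketch of walking a bundle and resolving one entry on demand; the directory path and the 'index.html' key are illustrative assumptions.

import {readDirectory} from '@remotion/lambda/dist/shared/read-dir';

const dir = readDirectory({
  dir: '/tmp/bundle',
  etags: {},
  originalDir: '/tmp/bundle',
  onProgress: (bytes) => console.log(`hashed a ${bytes}-byte chunk`),
});

// Keys are paths relative to originalDir; values hash the file when called.
const resolveOne = async () => {
  const etag = await dir['index.html']?.();
  console.log(etag); // e.g. '"<md5>"' for files up to 5 MB
};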
package/dist/shared/serialize-props.d.ts
ADDED
@@ -0,0 +1,14 @@
+type SerializedJSONWithCustomFields = {
+    serializedString: string;
+    customDateUsed: boolean;
+    customFileUsed: boolean;
+    mapUsed: boolean;
+    setUsed: boolean;
+};
+export declare const FILE_TOKEN = "remotion-file:";
+export declare const serializeJSONWithDate: ({ data, indent, staticBase, }: {
+    data: Record<string, unknown>;
+    indent: number | undefined;
+    staticBase: string | null;
+}) => SerializedJSONWithCustomFields;
+export {};
package/dist/shared/serialize-props.js
ADDED
@@ -0,0 +1,36 @@
+"use strict";
+// Must keep this file in sync with the one in packages/core/src/input-props-serialization.ts!
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.serializeJSONWithDate = exports.FILE_TOKEN = void 0;
+const DATE_TOKEN = 'remotion-date:';
+exports.FILE_TOKEN = 'remotion-file:';
+const serializeJSONWithDate = ({ data, indent, staticBase, }) => {
+    let customDateUsed = false;
+    let customFileUsed = false;
+    let mapUsed = false;
+    let setUsed = false;
+    const serializedString = JSON.stringify(data, function (key, value) {
+        const item = this[key];
+        if (item instanceof Date) {
+            customDateUsed = true;
+            return `${DATE_TOKEN}${item.toISOString()}`;
+        }
+        if (item instanceof Map) {
+            mapUsed = true;
+            return value;
+        }
+        if (item instanceof Set) {
+            setUsed = true;
+            return value;
+        }
+        if (typeof item === 'string' &&
+            staticBase !== null &&
+            item.startsWith(staticBase)) {
+            customFileUsed = true;
+            return `${exports.FILE_TOKEN}${item.replace(staticBase + '/', '')}`;
+        }
+        return value;
+    }, indent);
+    return { serializedString, customDateUsed, customFileUsed, mapUsed, setUsed };
+};
+exports.serializeJSONWithDate = serializeJSONWithDate;
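Note: serializeJSONWithDate wraps JSON.stringify with a replacer that tags Date values with a 'remotion-date:' token and staticBase-prefixed paths with the 'remotion-file:' token, and records whether a Map or Set was encountered, since those do not round-trip through JSON. A small example of the output shape; the staticBase value, the data, and the internal dist import path are illustrative assumptions.

import {serializeJSONWithDate} from '@remotion/lambda/dist/shared/serialize-props';

const {serializedString, customDateUsed, customFileUsed, setUsed} =
  serializeJSONWithDate({
    data: {
      startedAt: new Date('2024-01-01T00:00:00.000Z'),
      logo: '/static-abc/logo.png',
      viewers: new Set(['a', 'b']),
    },
    indent: 2,
    staticBase: '/static-abc',
  });

// startedAt serializes as "remotion-date:2024-01-01T00:00:00.000Z",
// logo as "remotion-file:logo.png"; setUsed is true so callers know
// the Set did not survive serialization.
console.log({customDateUsed, customFileUsed, setUsed}); // all true
console.log(serializedString);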
package/package.json
CHANGED
@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/lambda"
   },
   "name": "@remotion/lambda",
-  "version": "4.0.
+  "version": "4.0.201",
   "description": "Render Remotion videos on AWS Lambda",
   "main": "dist/index.js",
   "sideEffects": false,
@@ -22,13 +22,13 @@
     "@aws-sdk/s3-request-presigner": "3.583.0",
     "mime-types": "2.1.34",
     "zod": "3.22.3",
-    "@remotion/bundler": "4.0.
-    "@remotion/
-    "@remotion/
-    "@remotion/studio-server": "4.0.
-    "@remotion/streaming": "4.0.
-    "@remotion/serverless": "4.0.
-    "remotion": "4.0.
+    "@remotion/bundler": "4.0.201",
+    "@remotion/cli": "4.0.201",
+    "@remotion/renderer": "4.0.201",
+    "@remotion/studio-server": "4.0.201",
+    "@remotion/streaming": "4.0.201",
+    "@remotion/serverless": "4.0.201",
+    "remotion": "4.0.201"
   },
   "devDependencies": {
     "@types/mime-types": "2.1.1",
@@ -37,11 +37,11 @@
     "pureimage": "0.4.13",
     "vitest": "0.31.1",
     "zip-lib": "^0.7.2",
-    "@remotion/bundler": "4.0.
-    "@remotion/compositor-linux-arm64-gnu": "4.0.
+    "@remotion/bundler": "4.0.201",
+    "@remotion/compositor-linux-arm64-gnu": "4.0.201"
   },
   "peerDependencies": {
-    "@remotion/bundler": "4.0.
+    "@remotion/bundler": "4.0.201"
   },
   "publishConfig": {
     "access": "public"
package/remotionlambda-arm64.zip
CHANGED
Binary file

package/dist/functions/provider-implementation.d.ts
File without changes

package/dist/functions/provider-implementation.js
CHANGED
@@ -1 +0,0 @@
-"use strict";