@remotion/lambda 3.3.6 → 3.3.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/admin/generate-etag-list.d.ts +1 -0
- package/dist/admin/generate-etag-list.js +14 -0
- package/dist/admin/make-layer-public.js +23 -1
- package/dist/api/deploy-site.d.ts +5 -0
- package/dist/api/deploy-site.js +39 -16
- package/dist/api/get-aws-client.d.ts +2 -0
- package/dist/api/get-aws-client.js +2 -0
- package/dist/api/iam-validation/simulate-rule.js +0 -22
- package/dist/api/iam-validation/simulate.js +17 -5
- package/dist/api/iam-validation/user-permissions.js +0 -6
- package/dist/api/upload-dir.d.ts +4 -3
- package/dist/api/upload-dir.js +33 -19
- package/dist/cli/commands/render/progress.d.ts +6 -3
- package/dist/cli/commands/render/progress.js +49 -39
- package/dist/cli/commands/render/render.js +16 -1
- package/dist/cli/commands/sites/create.js +12 -2
- package/dist/cli/helpers/progress-bar.d.ts +8 -1
- package/dist/cli/helpers/progress-bar.js +17 -1
- package/dist/cli/index.js +6 -1
- package/dist/functions/helpers/concat-videos.d.ts +10 -6
- package/dist/functions/helpers/concat-videos.js +5 -21
- package/dist/functions/helpers/create-post-render-data.js +0 -12
- package/dist/functions/helpers/get-chromium-executable-path.js +2 -1
- package/dist/functions/helpers/get-chunk-progress.d.ts +0 -0
- package/dist/functions/helpers/get-chunk-progress.js +1 -0
- package/dist/functions/helpers/get-current-region.d.ts +1 -1
- package/dist/functions/helpers/get-encoding-metadata.d.ts +4 -8
- package/dist/functions/helpers/get-encoding-metadata.js +4 -18
- package/dist/functions/helpers/get-final-encoding-status.d.ts +2 -4
- package/dist/functions/helpers/get-final-encoding-status.js +1 -4
- package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts +1 -5
- package/dist/functions/helpers/get-lambdas-invoked-stats.js +1 -9
- package/dist/functions/helpers/get-overall-progress.d.ts +2 -1
- package/dist/functions/helpers/get-overall-progress.js +7 -5
- package/dist/functions/helpers/get-progress.js +37 -25
- package/dist/functions/helpers/get-rendered-frames-progress.d.ts +8 -0
- package/dist/functions/helpers/get-rendered-frames-progress.js +37 -0
- package/dist/functions/launch.js +48 -37
- package/dist/functions/renderer.js +15 -22
- package/dist/functions/still.js +2 -0
- package/dist/pricing/aws-regions.d.ts +1 -1
- package/dist/pricing/aws-regions.js +11 -0
- package/dist/pricing/price-per-1-s.js +413 -39
- package/dist/shared/aws-clients.d.ts +3 -0
- package/dist/shared/aws-clients.js +9 -1
- package/dist/shared/chunk-progress.d.ts +9 -0
- package/dist/shared/chunk-progress.js +2034 -0
- package/dist/shared/constants.d.ts +4 -5
- package/dist/shared/get-account-id.js +4 -6
- package/dist/shared/get-chunk-progress.d.ts +0 -0
- package/dist/shared/get-chunk-progress.js +1 -0
- package/dist/shared/get-etag.d.ts +1 -0
- package/dist/shared/get-etag.js +24 -0
- package/dist/shared/get-s3-operations.d.ts +10 -0
- package/dist/shared/get-s3-operations.js +36 -0
- package/dist/shared/hosted-layers.js +368 -60
- package/dist/shared/parse-chunk-key.d.ts +4 -0
- package/dist/shared/parse-chunk-key.js +14 -0
- package/dist/shared/read-dir.d.ts +9 -0
- package/dist/shared/read-dir.js +57 -0
- package/package.json +7 -6
- package/remotionlambda.zip +0 -0
- package/dist/shared/get-cloudwatch-stream-url.d.ts +0 -8
- package/dist/shared/get-cloudwatch-stream-url.js +0 -7
|
@@ -7,6 +7,7 @@ exports.sitesCreateSubcommand = exports.SITES_CREATE_SUBCOMMAND = void 0;
|
|
|
7
7
|
const cli_1 = require("@remotion/cli");
|
|
8
8
|
const fs_1 = require("fs");
|
|
9
9
|
const path_1 = __importDefault(require("path"));
|
|
10
|
+
const remotion_1 = require("remotion");
|
|
10
11
|
const deploy_site_1 = require("../../../api/deploy-site");
|
|
11
12
|
const get_or_create_bucket_1 = require("../../../api/get-or-create-bucket");
|
|
12
13
|
const constants_1 = require("../../../shared/constants");
|
|
@@ -57,6 +58,7 @@ const sitesCreateSubcommand = async (args, remotionRoot) => {
|
|
|
57
58
|
doneIn: null,
|
|
58
59
|
totalSize: null,
|
|
59
60
|
sizeUploaded: 0,
|
|
61
|
+
stats: null,
|
|
60
62
|
},
|
|
61
63
|
};
|
|
62
64
|
const updateProgress = () => {
|
|
@@ -79,7 +81,7 @@ const sitesCreateSubcommand = async (args, remotionRoot) => {
|
|
|
79
81
|
updateProgress();
|
|
80
82
|
const bundleStart = Date.now();
|
|
81
83
|
const uploadStart = Date.now();
|
|
82
|
-
const { serveUrl, siteName } = await (0, deploy_site_1.deploySite)({
|
|
84
|
+
const { serveUrl, siteName, stats } = await (0, deploy_site_1.deploySite)({
|
|
83
85
|
entryPoint: absoluteFile,
|
|
84
86
|
siteName: desiredSiteName,
|
|
85
87
|
bucketName,
|
|
@@ -95,6 +97,7 @@ const sitesCreateSubcommand = async (args, remotionRoot) => {
|
|
|
95
97
|
sizeUploaded: p.sizeUploaded,
|
|
96
98
|
totalSize: p.totalSize,
|
|
97
99
|
doneIn: null,
|
|
100
|
+
stats: null,
|
|
98
101
|
};
|
|
99
102
|
updateProgress();
|
|
100
103
|
},
|
|
@@ -108,6 +111,11 @@ const sitesCreateSubcommand = async (args, remotionRoot) => {
|
|
|
108
111
|
sizeUploaded: 1,
|
|
109
112
|
totalSize: 1,
|
|
110
113
|
doneIn: uploadDuration,
|
|
114
|
+
stats: {
|
|
115
|
+
addedFiles: stats.uploadedFiles,
|
|
116
|
+
removedFiles: stats.deletedFiles,
|
|
117
|
+
untouchedFiles: stats.untouchedFiles,
|
|
118
|
+
},
|
|
111
119
|
};
|
|
112
120
|
updateProgress();
|
|
113
121
|
log_1.Log.info();
|
|
@@ -117,6 +125,8 @@ const sitesCreateSubcommand = async (args, remotionRoot) => {
|
|
|
117
125
|
log_1.Log.info(`Site Name: ${siteName}`);
|
|
118
126
|
log_1.Log.info();
|
|
119
127
|
log_1.Log.info(cli_1.CliInternals.chalk.blueBright('ℹ️ If you make changes to your code, you need to redeploy the site. You can overwrite the existing site by running:'));
|
|
120
|
-
log_1.Log.info(cli_1.CliInternals.chalk.blueBright(
|
|
128
|
+
log_1.Log.info(cli_1.CliInternals.chalk.blueBright(['npx remotion lambda sites create', args[0], `--site-name=${siteName}`]
|
|
129
|
+
.filter(remotion_1.Internals.truthy)
|
|
130
|
+
.join(' ')));
|
|
121
131
|
};
|
|
122
132
|
exports.sitesCreateSubcommand = sitesCreateSubcommand;
|
|
@@ -9,9 +9,16 @@ export declare type BucketCreationProgress = {
|
|
|
9
9
|
doneIn: number | null;
|
|
10
10
|
};
|
|
11
11
|
export declare const makeBucketProgress: ({ bucketCreated, websiteEnabled, doneIn, }: BucketCreationProgress) => string;
|
|
12
|
+
declare type UploadStats = {
|
|
13
|
+
addedFiles: number;
|
|
14
|
+
removedFiles: number;
|
|
15
|
+
untouchedFiles: number;
|
|
16
|
+
};
|
|
12
17
|
export declare type DeployToS3Progress = {
|
|
13
18
|
sizeUploaded: number;
|
|
14
19
|
totalSize: number | null;
|
|
15
20
|
doneIn: number | null;
|
|
21
|
+
stats: UploadStats | null;
|
|
16
22
|
};
|
|
17
|
-
export declare const makeDeployProgressBar: ({ sizeUploaded, totalSize, doneIn, }: DeployToS3Progress) => string;
|
|
23
|
+
export declare const makeDeployProgressBar: ({ sizeUploaded, totalSize, doneIn, stats, }: DeployToS3Progress) => string;
|
|
24
|
+
export {};
|
|
@@ -30,7 +30,22 @@ const makeBucketProgress = ({ bucketCreated, websiteEnabled, doneIn, }) => {
|
|
|
30
30
|
].join(' ');
|
|
31
31
|
};
|
|
32
32
|
exports.makeBucketProgress = makeBucketProgress;
|
|
33
|
-
const
|
|
33
|
+
const makeUploadDiff = ({ stats }) => {
|
|
34
|
+
if (!stats) {
|
|
35
|
+
return null;
|
|
36
|
+
}
|
|
37
|
+
if (stats.addedFiles === 0 && stats.removedFiles === 0) {
|
|
38
|
+
return cli_1.CliInternals.chalk.gray(`(Unchanged)`);
|
|
39
|
+
}
|
|
40
|
+
const total = stats.addedFiles + stats.removedFiles;
|
|
41
|
+
return cli_1.CliInternals.chalk.gray(`(${[
|
|
42
|
+
stats.addedFiles ? `+${stats.addedFiles}` : null,
|
|
43
|
+
stats.removedFiles ? `-${stats.removedFiles}` : null,
|
|
44
|
+
]
|
|
45
|
+
.filter(remotion_1.Internals.truthy)
|
|
46
|
+
.join(',')} ${total === 1 ? 'file' : 'files'})`);
|
|
47
|
+
};
|
|
48
|
+
const makeDeployProgressBar = ({ sizeUploaded, totalSize, doneIn, stats, }) => {
|
|
34
49
|
const progress = totalSize === null ? 0 : sizeUploaded / totalSize;
|
|
35
50
|
return [
|
|
36
51
|
'☁️ ',
|
|
@@ -42,6 +57,7 @@ const makeDeployProgressBar = ({ sizeUploaded, totalSize, doneIn, }) => {
|
|
|
42
57
|
? `${cli_1.CliInternals.formatBytes(sizeUploaded)}/${cli_1.CliInternals.formatBytes(totalSize)}`
|
|
43
58
|
: ''
|
|
44
59
|
: cli_1.CliInternals.chalk.gray(`${doneIn}ms`),
|
|
60
|
+
makeUploadDiff({ stats }),
|
|
45
61
|
]
|
|
46
62
|
.filter(remotion_1.Internals.truthy)
|
|
47
63
|
.join(' ');
|
package/dist/cli/index.js
CHANGED
|
@@ -97,7 +97,7 @@ const matchCommand = (args, remotionRoot) => {
|
|
|
97
97
|
(0, quit_1.quit)(1);
|
|
98
98
|
};
|
|
99
99
|
const executeCommand = async (args, remotionRoot) => {
|
|
100
|
-
var _a, _b;
|
|
100
|
+
var _a, _b, _c;
|
|
101
101
|
try {
|
|
102
102
|
(0, is_cli_1.setIsCli)(true);
|
|
103
103
|
await matchCommand(args, remotionRoot);
|
|
@@ -131,6 +131,11 @@ AWS returned an "AccessDenied" error message meaning a permission is missing. Re
|
|
|
131
131
|
log_1.Log.error(`
|
|
132
132
|
AWS returned an "TooManyRequestsException" error message which could mean you reached the concurrency limit of AWS Lambda. You can increase the limit - read this troubleshooting page: ${docs_url_1.DOCS_URL}/docs/lambda/troubleshooting/rate-limit. The original error message is:
|
|
133
133
|
`.trim());
|
|
134
|
+
}
|
|
135
|
+
if ((_c = error.stack) === null || _c === void 0 ? void 0 : _c.includes('The security token included in the request is invalid')) {
|
|
136
|
+
log_1.Log.error(`
|
|
137
|
+
AWS returned an error message "The security token included in the request is invalid". A possible reason for this is that you did not enable the region in your AWS account under "Account". The original message is:
|
|
138
|
+
`);
|
|
134
139
|
}
|
|
135
140
|
log_1.Log.error(error.stack);
|
|
136
141
|
(0, quit_1.quit)(1);
|
|
@@ -2,22 +2,26 @@ import type { FfmpegExecutable } from '@remotion/renderer';
|
|
|
2
2
|
import type { AwsRegion } from '../../pricing/aws-regions';
|
|
3
3
|
import type { LambdaCodec } from '../../shared/validate-lambda-codec';
|
|
4
4
|
import type { EnhancedErrorInfo } from './write-lambda-error';
|
|
5
|
-
export declare const
|
|
5
|
+
export declare const getAllFilesS3: ({ bucket, expectedFiles, outdir, renderId, region, expectedBucketOwner, onErrors, }: {
|
|
6
6
|
bucket: string;
|
|
7
7
|
expectedFiles: number;
|
|
8
|
-
|
|
9
|
-
onErrors: (errors: EnhancedErrorInfo[]) => Promise<void>;
|
|
10
|
-
numberOfFrames: number;
|
|
8
|
+
outdir: string;
|
|
11
9
|
renderId: string;
|
|
12
10
|
region: AwsRegion;
|
|
13
|
-
codec: LambdaCodec;
|
|
14
11
|
expectedBucketOwner: string;
|
|
12
|
+
onErrors: (errors: EnhancedErrorInfo[]) => Promise<void>;
|
|
13
|
+
}) => Promise<string[]>;
|
|
14
|
+
export declare const concatVideosS3: ({ onProgress, numberOfFrames, codec, fps, numberOfGifLoops, ffmpegExecutable, remotionRoot, files, outdir, }: {
|
|
15
|
+
onProgress: (frames: number) => void;
|
|
16
|
+
numberOfFrames: number;
|
|
17
|
+
codec: LambdaCodec;
|
|
15
18
|
fps: number;
|
|
16
19
|
numberOfGifLoops: number | null;
|
|
17
20
|
ffmpegExecutable: FfmpegExecutable;
|
|
18
21
|
remotionRoot: string;
|
|
22
|
+
files: string[];
|
|
23
|
+
outdir: string;
|
|
19
24
|
}) => Promise<{
|
|
20
25
|
outfile: string;
|
|
21
26
|
cleanupChunksProm: Promise<void>;
|
|
22
|
-
encodingStart: number;
|
|
23
27
|
}>;
|
|
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|
|
23
23
|
return result;
|
|
24
24
|
};
|
|
25
25
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
26
|
-
exports.concatVideosS3 = void 0;
|
|
26
|
+
exports.concatVideosS3 = exports.getAllFilesS3 = void 0;
|
|
27
27
|
const renderer_1 = require("@remotion/renderer");
|
|
28
28
|
const fs_1 = __importStar(require("fs"));
|
|
29
29
|
const path_1 = __importStar(require("path"));
|
|
@@ -127,34 +127,18 @@ const getAllFilesS3 = ({ bucket, expectedFiles, outdir, renderId, region, expect
|
|
|
127
127
|
loop().catch((err) => reject(err));
|
|
128
128
|
});
|
|
129
129
|
};
|
|
130
|
-
|
|
130
|
+
exports.getAllFilesS3 = getAllFilesS3;
|
|
131
|
+
const concatVideosS3 = async ({ onProgress, numberOfFrames, codec, fps, numberOfGifLoops, ffmpegExecutable, remotionRoot, files, outdir, }) => {
|
|
131
132
|
var _a;
|
|
132
|
-
const outdir = (0, path_1.join)(renderer_1.RenderInternals.tmpDir(constants_1.CONCAT_FOLDER_TOKEN), 'bucket');
|
|
133
|
-
if ((0, fs_1.existsSync)(outdir)) {
|
|
134
|
-
(fs_1.rmSync !== null && fs_1.rmSync !== void 0 ? fs_1.rmSync : fs_1.rmdirSync)(outdir, {
|
|
135
|
-
recursive: true,
|
|
136
|
-
});
|
|
137
|
-
}
|
|
138
|
-
(0, fs_1.mkdirSync)(outdir);
|
|
139
|
-
const files = await getAllFilesS3({
|
|
140
|
-
bucket,
|
|
141
|
-
expectedFiles,
|
|
142
|
-
outdir,
|
|
143
|
-
renderId,
|
|
144
|
-
region,
|
|
145
|
-
expectedBucketOwner,
|
|
146
|
-
onErrors,
|
|
147
|
-
});
|
|
148
133
|
const outfile = (0, path_1.join)(renderer_1.RenderInternals.tmpDir(constants_1.REMOTION_CONCATED_TOKEN), 'concat.' + renderer_1.RenderInternals.getFileExtensionFromCodec(codec, 'final'));
|
|
149
134
|
const combine = (0, timer_1.timer)('Combine videos');
|
|
150
135
|
const filelistDir = renderer_1.RenderInternals.tmpDir(constants_1.REMOTION_FILELIST_TOKEN);
|
|
151
|
-
const encodingStart = Date.now();
|
|
152
136
|
const codecForCombining = codec === 'h264-mkv' ? 'h264' : codec;
|
|
153
137
|
await (0, renderer_1.combineVideos)({
|
|
154
138
|
files,
|
|
155
139
|
filelistDir,
|
|
156
140
|
output: outfile,
|
|
157
|
-
onProgress: (p) => onProgress(p
|
|
141
|
+
onProgress: (p) => onProgress(p),
|
|
158
142
|
numberOfFrames,
|
|
159
143
|
codec: codecForCombining,
|
|
160
144
|
fps,
|
|
@@ -166,6 +150,6 @@ const concatVideosS3 = async ({ bucket, expectedFiles, onProgress, numberOfFrame
|
|
|
166
150
|
const cleanupChunksProm = ((_a = fs_1.default.promises.rm) !== null && _a !== void 0 ? _a : fs_1.default.promises.rmdir)(outdir, {
|
|
167
151
|
recursive: true,
|
|
168
152
|
});
|
|
169
|
-
return { outfile, cleanupChunksProm
|
|
153
|
+
return { outfile, cleanupChunksProm };
|
|
170
154
|
};
|
|
171
155
|
exports.concatVideosS3 = concatVideosS3;
|
|
@@ -8,7 +8,6 @@ const parse_lambda_timings_key_1 = require("../../shared/parse-lambda-timings-ke
|
|
|
8
8
|
const calculate_chunk_times_1 = require("./calculate-chunk-times");
|
|
9
9
|
const get_current_architecture_1 = require("./get-current-architecture");
|
|
10
10
|
const get_files_to_delete_1 = require("./get-files-to-delete");
|
|
11
|
-
const get_lambdas_invoked_stats_1 = require("./get-lambdas-invoked-stats");
|
|
12
11
|
const get_retry_stats_1 = require("./get-retry-stats");
|
|
13
12
|
const get_time_to_finish_1 = require("./get-time-to-finish");
|
|
14
13
|
const createPostRenderData = ({ renderId, region, memorySizeInMb, renderMetadata, contents, timeToEncode, errorExplanations, timeToDelete, outputFile, }) => {
|
|
@@ -41,17 +40,7 @@ const createPostRenderData = ({ renderId, region, memorySizeInMb, renderMetadata
|
|
|
41
40
|
const renderSize = contents
|
|
42
41
|
.map((c) => { var _a; return (_a = c.Size) !== null && _a !== void 0 ? _a : 0; })
|
|
43
42
|
.reduce((a, b) => a + b, 0);
|
|
44
|
-
const { timeToInvokeLambdas } = (0, get_lambdas_invoked_stats_1.getLambdasInvokedStats)({
|
|
45
|
-
contents,
|
|
46
|
-
renderId,
|
|
47
|
-
estimatedRenderLambdaInvokations: renderMetadata.estimatedRenderLambdaInvokations,
|
|
48
|
-
startDate: renderMetadata.startedDate,
|
|
49
|
-
checkIfAllLambdasWereInvoked: false,
|
|
50
|
-
});
|
|
51
43
|
const retriesInfo = (0, get_retry_stats_1.getRetryStats)({ contents, renderId });
|
|
52
|
-
if (timeToInvokeLambdas === null) {
|
|
53
|
-
throw new Error('should have timing for all lambdas');
|
|
54
|
-
}
|
|
55
44
|
return {
|
|
56
45
|
cost: {
|
|
57
46
|
currency: 'USD',
|
|
@@ -81,7 +70,6 @@ const createPostRenderData = ({ renderId, region, memorySizeInMb, renderMetadata
|
|
|
81
70
|
renderId,
|
|
82
71
|
type: 'absolute-time',
|
|
83
72
|
}),
|
|
84
|
-
timeToInvokeLambdas,
|
|
85
73
|
retriesInfo,
|
|
86
74
|
mostExpensiveFrameRanges: (0, get_most_expensive_chunks_1.getMostExpensiveChunks)(parsedTimings, renderMetadata.framesPerLambda),
|
|
87
75
|
};
|
|
@@ -2,9 +2,10 @@
|
|
|
2
2
|
var _a;
|
|
3
3
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
4
|
exports.executablePath = void 0;
|
|
5
|
-
if (/^AWS_Lambda_nodejs(?:10|12|14)[.]x$/.test((_a = process.env.AWS_EXECUTION_ENV) !== null && _a !== void 0 ? _a : '') === true) {
|
|
5
|
+
if (/^AWS_Lambda_nodejs(?:10|12|14|16|18)[.]x$/.test((_a = process.env.AWS_EXECUTION_ENV) !== null && _a !== void 0 ? _a : '') === true) {
|
|
6
6
|
if (process.env.FONTCONFIG_PATH === undefined) {
|
|
7
7
|
process.env.FONTCONFIG_PATH = '/opt';
|
|
8
|
+
process.env.FONTCONFIG_FILE = '/opt/fonts.conf';
|
|
8
9
|
}
|
|
9
10
|
process.env.LD_LIBRARY_PATH = '/opt/lib:/opt/bin';
|
|
10
11
|
}
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"use strict";
|
|
@@ -1 +1 @@
|
|
|
1
|
-
export declare const getCurrentRegionInFunction: () => "eu-central-1" | "eu-west-1" | "eu-west-2" | "us-east-1" | "us-east-2" | "us-west-2" | "ap-south-1" | "ap-southeast-1" | "ap-southeast-2" | "ap-northeast-1";
|
|
1
|
+
export declare const getCurrentRegionInFunction: () => "eu-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-south-1" | "eu-north-1" | "us-east-1" | "us-east-2" | "us-west-1" | "us-west-2" | "af-south-1" | "ap-south-1" | "ap-east-1" | "ap-southeast-1" | "ap-southeast-2" | "ap-northeast-1" | "ap-northeast-2" | "ap-northeast-3" | "ca-central-1" | "me-south-1" | "sa-east-1";
|
|
@@ -1,9 +1,5 @@
|
|
|
1
|
+
import type { _Object } from '@aws-sdk/client-s3';
|
|
1
2
|
import type { EncodingProgress } from '../../defaults';
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
bucketName: string;
|
|
6
|
-
renderId: string;
|
|
7
|
-
region: AwsRegion;
|
|
8
|
-
expectedBucketOwner: string;
|
|
9
|
-
}) => Promise<EncodingProgress | null>;
|
|
3
|
+
export declare const getEncodingMetadata: ({ exists, }: {
|
|
4
|
+
exists: _Object | undefined;
|
|
5
|
+
}) => EncodingProgress | null;
|
|
@@ -1,26 +1,12 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.getEncodingMetadata = void 0;
|
|
4
|
-
const
|
|
5
|
-
const
|
|
6
|
-
const io_1 = require("./io");
|
|
7
|
-
const getEncodingMetadata = async ({ exists, bucketName, renderId, region, expectedBucketOwner, }) => {
|
|
4
|
+
const chunk_progress_1 = require("../../shared/chunk-progress");
|
|
5
|
+
const getEncodingMetadata = ({ exists, }) => {
|
|
8
6
|
if (!exists) {
|
|
9
7
|
return null;
|
|
10
8
|
}
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
bucketName,
|
|
14
|
-
key: (0, defaults_1.encodingProgressKey)(renderId),
|
|
15
|
-
region,
|
|
16
|
-
expectedBucketOwner,
|
|
17
|
-
});
|
|
18
|
-
const encodingProgress = JSON.parse(await (0, stream_to_string_1.streamToString)(Body));
|
|
19
|
-
return encodingProgress;
|
|
20
|
-
}
|
|
21
|
-
catch (err) {
|
|
22
|
-
// The file may not yet have been fully written or already have been cleaned up again
|
|
23
|
-
return null;
|
|
24
|
-
}
|
|
9
|
+
const framesEncoded = (0, chunk_progress_1.getProgressOfChunk)(exists.ETag);
|
|
10
|
+
return { framesEncoded };
|
|
25
11
|
};
|
|
26
12
|
exports.getEncodingMetadata = getEncodingMetadata;
|
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
import type { EncodingProgress, RenderMetadata } from '../../shared/constants';
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
encodingStatus: EncodingProgress | null;
|
|
2
|
+
export declare const getFinalEncodingStatus: ({ encodingProgress, renderMetadata, outputFileExists, }: {
|
|
3
|
+
encodingProgress: EncodingProgress | null;
|
|
5
4
|
renderMetadata: RenderMetadata | null;
|
|
6
5
|
outputFileExists: boolean;
|
|
7
|
-
lambdaInvokeStatus: LambdaInvokeStats;
|
|
8
6
|
}) => EncodingProgress | null;
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.getFinalEncodingStatus = void 0;
|
|
4
|
-
const getFinalEncodingStatus = ({
|
|
4
|
+
const getFinalEncodingStatus = ({ encodingProgress, renderMetadata, outputFileExists, }) => {
|
|
5
5
|
if (!renderMetadata) {
|
|
6
6
|
return null;
|
|
7
7
|
}
|
|
@@ -11,9 +11,6 @@ const getFinalEncodingStatus = ({ encodingStatus: encodingProgress, renderMetada
|
|
|
11
11
|
if (outputFileExists) {
|
|
12
12
|
return {
|
|
13
13
|
framesEncoded: renderMetadata.videoConfig.durationInFrames,
|
|
14
|
-
totalFrames: renderMetadata.videoConfig.durationInFrames,
|
|
15
|
-
doneIn: null,
|
|
16
|
-
timeToInvoke: lambdaInvokeStatus.timeToInvokeLambdas,
|
|
17
14
|
};
|
|
18
15
|
}
|
|
19
16
|
return null;
|
|
@@ -1,12 +1,8 @@
|
|
|
1
1
|
import type { _Object } from '@aws-sdk/client-s3';
|
|
2
2
|
export declare type LambdaInvokeStats = {
|
|
3
|
-
timeToInvokeLambdas: number | null;
|
|
4
3
|
lambdasInvoked: number;
|
|
5
4
|
};
|
|
6
|
-
export declare const getLambdasInvokedStats: ({ contents, renderId,
|
|
5
|
+
export declare const getLambdasInvokedStats: ({ contents, renderId, }: {
|
|
7
6
|
contents: _Object[];
|
|
8
7
|
renderId: string;
|
|
9
|
-
estimatedRenderLambdaInvokations: number | null;
|
|
10
|
-
startDate: number | null;
|
|
11
|
-
checkIfAllLambdasWereInvoked: boolean;
|
|
12
8
|
}) => LambdaInvokeStats;
|
|
@@ -3,19 +3,11 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.getLambdasInvokedStats = void 0;
|
|
4
4
|
const constants_1 = require("../../shared/constants");
|
|
5
5
|
const parse_lambda_initialized_key_1 = require("../../shared/parse-lambda-initialized-key");
|
|
6
|
-
const
|
|
7
|
-
const getLambdasInvokedStats = ({ contents, renderId, estimatedRenderLambdaInvokations, startDate, checkIfAllLambdasWereInvoked, }) => {
|
|
8
|
-
var _a;
|
|
6
|
+
const getLambdasInvokedStats = ({ contents, renderId, }) => {
|
|
9
7
|
const lambdasInvoked = contents
|
|
10
8
|
.filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.lambdaChunkInitializedPrefix)(renderId)); })
|
|
11
9
|
.filter((c) => (0, parse_lambda_initialized_key_1.parseLambdaInitializedKey)(c.Key).attempt === 1);
|
|
12
|
-
const allLambdasInvoked = !checkIfAllLambdasWereInvoked ||
|
|
13
|
-
lambdasInvoked.length === estimatedRenderLambdaInvokations;
|
|
14
|
-
const timeToInvokeLambdas = !allLambdasInvoked || startDate === null
|
|
15
|
-
? null
|
|
16
|
-
: ((_a = (0, min_max_1.max)(lambdasInvoked.map((l) => { var _a; return (_a = l.LastModified) === null || _a === void 0 ? void 0 : _a.getTime(); }))) !== null && _a !== void 0 ? _a : 0) - startDate;
|
|
17
10
|
return {
|
|
18
|
-
timeToInvokeLambdas,
|
|
19
11
|
lambdasInvoked: lambdasInvoked.length,
|
|
20
12
|
};
|
|
21
13
|
};
|
|
@@ -1,6 +1,7 @@
|
|
|
1
|
-
export declare const getOverallProgress: ({ cleanup, encoding, rendering, invoking, }: {
|
|
1
|
+
export declare const getOverallProgress: ({ cleanup, encoding, rendering, invoking, frames, }: {
|
|
2
2
|
cleanup: number;
|
|
3
3
|
encoding: number;
|
|
4
4
|
rendering: number;
|
|
5
5
|
invoking: number;
|
|
6
|
+
frames: number;
|
|
6
7
|
}) => number;
|
|
@@ -3,14 +3,16 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.getOverallProgress = void 0;
|
|
4
4
|
const weights = {
|
|
5
5
|
cleanup: 0.1,
|
|
6
|
-
encoding: 0.
|
|
7
|
-
rendering: 0.
|
|
8
|
-
|
|
6
|
+
encoding: 0.225,
|
|
7
|
+
rendering: 0.225,
|
|
8
|
+
frames: 0.225,
|
|
9
|
+
invoking: 0.225,
|
|
9
10
|
};
|
|
10
|
-
const getOverallProgress = ({ cleanup, encoding, rendering, invoking, }) => {
|
|
11
|
+
const getOverallProgress = ({ cleanup, encoding, rendering, invoking, frames, }) => {
|
|
11
12
|
return (cleanup * weights.cleanup +
|
|
12
13
|
encoding * weights.encoding +
|
|
13
14
|
rendering * weights.rendering +
|
|
14
|
-
invoking * weights.invoking
|
|
15
|
+
invoking * weights.invoking +
|
|
16
|
+
frames * weights.frames);
|
|
15
17
|
};
|
|
16
18
|
exports.getOverallProgress = getOverallProgress;
|
|
@@ -1,7 +1,9 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.getProgress = void 0;
|
|
4
|
+
const renderer_1 = require("@remotion/renderer");
|
|
4
5
|
const remotion_1 = require("remotion");
|
|
6
|
+
const chunk_progress_1 = require("../../shared/chunk-progress");
|
|
5
7
|
const constants_1 = require("../../shared/constants");
|
|
6
8
|
const docs_url_1 = require("../../shared/docs-url");
|
|
7
9
|
const calculate_chunk_times_1 = require("./calculate-chunk-times");
|
|
@@ -19,12 +21,13 @@ const get_lambdas_invoked_stats_1 = require("./get-lambdas-invoked-stats");
|
|
|
19
21
|
const get_overall_progress_1 = require("./get-overall-progress");
|
|
20
22
|
const get_post_render_data_1 = require("./get-post-render-data");
|
|
21
23
|
const get_render_metadata_1 = require("./get-render-metadata");
|
|
24
|
+
const get_rendered_frames_progress_1 = require("./get-rendered-frames-progress");
|
|
22
25
|
const get_retry_stats_1 = require("./get-retry-stats");
|
|
23
26
|
const get_time_to_finish_1 = require("./get-time-to-finish");
|
|
24
27
|
const inspect_errors_1 = require("./inspect-errors");
|
|
25
28
|
const io_1 = require("./io");
|
|
26
29
|
const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region, memorySizeInMb, timeoutInMilliseconds, customCredentials, }) => {
|
|
27
|
-
var _a, _b, _c, _d, _e, _f, _g, _h
|
|
30
|
+
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
28
31
|
const postRenderData = await (0, get_post_render_data_1.getPostRenderData)({
|
|
29
32
|
bucketName,
|
|
30
33
|
region,
|
|
@@ -33,7 +36,9 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
33
36
|
});
|
|
34
37
|
if (postRenderData) {
|
|
35
38
|
const outData = (0, expected_out_name_1.getExpectedOutName)(postRenderData.renderMetadata, bucketName, customCredentials);
|
|
39
|
+
const totalFrameCount = renderer_1.RenderInternals.getFramesToRender(postRenderData.renderMetadata.frameRange, postRenderData.renderMetadata.everyNthFrame).length;
|
|
36
40
|
return {
|
|
41
|
+
framesRendered: totalFrameCount,
|
|
37
42
|
bucket: bucketName,
|
|
38
43
|
renderSize: postRenderData.renderSize,
|
|
39
44
|
chunks: postRenderData.renderMetadata.totalChunks,
|
|
@@ -51,10 +56,7 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
51
56
|
currentTime: Date.now(),
|
|
52
57
|
done: true,
|
|
53
58
|
encodingStatus: {
|
|
54
|
-
framesEncoded:
|
|
55
|
-
totalFrames: postRenderData.renderMetadata.videoConfig.durationInFrames,
|
|
56
|
-
doneIn: postRenderData.timeToEncode,
|
|
57
|
-
timeToInvoke: postRenderData.timeToInvokeLambdas,
|
|
59
|
+
framesEncoded: totalFrameCount,
|
|
58
60
|
},
|
|
59
61
|
errors: postRenderData.errors,
|
|
60
62
|
fatalErrorEncountered: false,
|
|
@@ -64,12 +66,12 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
64
66
|
renderMetadata: postRenderData.renderMetadata,
|
|
65
67
|
timeToFinish: postRenderData.timeToFinish,
|
|
66
68
|
timeToFinishChunks: postRenderData.timeToRenderChunks,
|
|
67
|
-
timeToInvokeLambdas: postRenderData.timeToInvokeLambdas,
|
|
68
69
|
overallProgress: 1,
|
|
69
70
|
retriesInfo: postRenderData.retriesInfo,
|
|
70
71
|
outKey: outData.key,
|
|
71
72
|
outBucket: outData.renderBucketName,
|
|
72
73
|
mostExpensiveFrameRanges: (_a = postRenderData.mostExpensiveFrameRanges) !== null && _a !== void 0 ? _a : null,
|
|
74
|
+
timeToEncode: postRenderData.timeToEncode,
|
|
73
75
|
};
|
|
74
76
|
}
|
|
75
77
|
const contents = await (0, io_1.lambdaLs)({
|
|
@@ -80,14 +82,10 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
80
82
|
});
|
|
81
83
|
(0, check_if_render_exists_1.checkIfRenderExists)(contents, renderId, bucketName, (0, get_current_region_1.getCurrentRegionInFunction)());
|
|
82
84
|
const renderMetadataExists = Boolean(contents.find((c) => c.Key === (0, constants_1.renderMetadataKey)(renderId)));
|
|
83
|
-
const
|
|
84
|
-
(0,
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
renderId,
|
|
88
|
-
region: (0, get_current_region_1.getCurrentRegionInFunction)(),
|
|
89
|
-
expectedBucketOwner,
|
|
90
|
-
}),
|
|
85
|
+
const encodingStatus = (0, get_encoding_metadata_1.getEncodingMetadata)({
|
|
86
|
+
exists: contents.find((c) => c.Key === (0, constants_1.encodingProgressKey)(renderId)),
|
|
87
|
+
});
|
|
88
|
+
const [renderMetadata, errorExplanations] = await Promise.all([
|
|
91
89
|
renderMetadataExists
|
|
92
90
|
? (0, get_render_metadata_1.getRenderMetadata)({
|
|
93
91
|
bucketName,
|
|
@@ -137,6 +135,20 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
137
135
|
renderMetadata,
|
|
138
136
|
});
|
|
139
137
|
const chunks = contents.filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.chunkKey)(renderId)); });
|
|
138
|
+
const framesRendered = renderMetadata
|
|
139
|
+
? (0, get_rendered_frames_progress_1.getRenderedFramesProgress)({
|
|
140
|
+
contents,
|
|
141
|
+
everyNthFrame: renderMetadata.everyNthFrame,
|
|
142
|
+
frameRange: renderMetadata.frameRange,
|
|
143
|
+
framesPerLambda: renderMetadata.framesPerLambda,
|
|
144
|
+
renderId,
|
|
145
|
+
})
|
|
146
|
+
: 0;
|
|
147
|
+
console.log('etags', contents
|
|
148
|
+
.filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.lambdaChunkInitializedPrefix)(renderId)); })
|
|
149
|
+
.map((c) => {
|
|
150
|
+
return (0, chunk_progress_1.getProgressOfChunk)(c.ETag);
|
|
151
|
+
}));
|
|
140
152
|
const allChunks = chunks.length === ((_f = renderMetadata === null || renderMetadata === void 0 ? void 0 : renderMetadata.totalChunks) !== null && _f !== void 0 ? _f : Infinity);
|
|
141
153
|
const renderSize = contents
|
|
142
154
|
.map((c) => { var _a; return (_a = c.Size) !== null && _a !== void 0 ? _a : 0; })
|
|
@@ -144,22 +156,18 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
144
156
|
const lambdasInvokedStats = (0, get_lambdas_invoked_stats_1.getLambdasInvokedStats)({
|
|
145
157
|
contents,
|
|
146
158
|
renderId,
|
|
147
|
-
estimatedRenderLambdaInvokations: (_g = renderMetadata === null || renderMetadata === void 0 ? void 0 : renderMetadata.estimatedRenderLambdaInvokations) !== null && _g !== void 0 ? _g : null,
|
|
148
|
-
startDate: (_h = renderMetadata === null || renderMetadata === void 0 ? void 0 : renderMetadata.startedDate) !== null && _h !== void 0 ? _h : null,
|
|
149
|
-
checkIfAllLambdasWereInvoked: true,
|
|
150
159
|
});
|
|
151
160
|
const retriesInfo = (0, get_retry_stats_1.getRetryStats)({
|
|
152
161
|
contents,
|
|
153
162
|
renderId,
|
|
154
163
|
});
|
|
155
164
|
const finalEncodingStatus = (0, get_final_encoding_status_1.getFinalEncodingStatus)({
|
|
156
|
-
encodingStatus,
|
|
165
|
+
encodingProgress: encodingStatus,
|
|
157
166
|
outputFileExists: Boolean(outputFile),
|
|
158
167
|
renderMetadata,
|
|
159
|
-
lambdaInvokeStatus: lambdasInvokedStats,
|
|
160
168
|
});
|
|
161
169
|
const chunkCount = outputFile
|
|
162
|
-
? (
|
|
170
|
+
? (_g = renderMetadata === null || renderMetadata === void 0 ? void 0 : renderMetadata.totalChunks) !== null && _g !== void 0 ? _g : 0
|
|
163
171
|
: chunks.length;
|
|
164
172
|
// We add a 20 second buffer for it, since AWS timeshifts can be quite a lot. Once it's 20sec over the limit, we consider it timed out
|
|
165
173
|
const isBeyondTimeout = renderMetadata &&
|
|
@@ -182,7 +190,11 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
182
190
|
: null,
|
|
183
191
|
...errorExplanations,
|
|
184
192
|
].filter(remotion_1.Internals.truthy);
|
|
193
|
+
const frameCount = renderMetadata
|
|
194
|
+
? renderer_1.RenderInternals.getFramesToRender(renderMetadata.frameRange, renderMetadata.everyNthFrame).length
|
|
195
|
+
: null;
|
|
185
196
|
return {
|
|
197
|
+
framesRendered,
|
|
186
198
|
chunks: chunkCount,
|
|
187
199
|
done: false,
|
|
188
200
|
encodingStatus,
|
|
@@ -190,7 +202,7 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
190
202
|
renderId,
|
|
191
203
|
renderMetadata,
|
|
192
204
|
bucket: bucketName,
|
|
193
|
-
outputFile: (
|
|
205
|
+
outputFile: (_h = outputFile === null || outputFile === void 0 ? void 0 : outputFile.url) !== null && _h !== void 0 ? _h : null,
|
|
194
206
|
timeToFinish,
|
|
195
207
|
errors: allErrors,
|
|
196
208
|
fatalErrorEncountered: allErrors.some((f) => f.isFatal && !f.willRetry),
|
|
@@ -205,18 +217,17 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
205
217
|
type: 'absolute-time',
|
|
206
218
|
})
|
|
207
219
|
: null,
|
|
208
|
-
timeToInvokeLambdas: (_l = encodingStatus === null || encodingStatus === void 0 ? void 0 : encodingStatus.timeToInvoke) !== null && _l !== void 0 ? _l : lambdasInvokedStats.timeToInvokeLambdas,
|
|
209
220
|
overallProgress: (0, get_overall_progress_1.getOverallProgress)({
|
|
210
221
|
cleanup: cleanup ? cleanup.filesDeleted / cleanup.minFilesToDelete : 0,
|
|
211
|
-
encoding: finalEncodingStatus && renderMetadata
|
|
212
|
-
? finalEncodingStatus.framesEncoded /
|
|
213
|
-
renderMetadata.videoConfig.durationInFrames
|
|
222
|
+
encoding: finalEncodingStatus && renderMetadata && frameCount
|
|
223
|
+
? finalEncodingStatus.framesEncoded / frameCount
|
|
214
224
|
: 0,
|
|
215
225
|
invoking: renderMetadata
|
|
216
226
|
? lambdasInvokedStats.lambdasInvoked /
|
|
217
227
|
renderMetadata.estimatedRenderLambdaInvokations
|
|
218
228
|
: 0,
|
|
219
229
|
rendering: renderMetadata ? chunkCount / renderMetadata.totalChunks : 0,
|
|
230
|
+
frames: frameCount === null ? 0 : framesRendered / frameCount,
|
|
220
231
|
}),
|
|
221
232
|
retriesInfo,
|
|
222
233
|
outKey: outputFile && renderMetadata
|
|
@@ -227,6 +238,7 @@ const getProgress = async ({ bucketName, renderId, expectedBucketOwner, region,
|
|
|
227
238
|
.renderBucketName
|
|
228
239
|
: null,
|
|
229
240
|
mostExpensiveFrameRanges: null,
|
|
241
|
+
timeToEncode: null,
|
|
230
242
|
};
|
|
231
243
|
};
|
|
232
244
|
exports.getProgress = getProgress;
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import type { _Object } from '@aws-sdk/client-s3';
/**
 * Computes the number of frames rendered so far for a Lambda render,
 * derived from the S3 objects the render has produced.
 *
 * @param contents - S3 objects listed from the render bucket.
 * @param renderId - Identifier of the render whose object keys are matched.
 * @param framesPerLambda - Number of frames assigned to each Lambda chunk.
 * @param frameRange - Inclusive overall [start, end] frame range of the render.
 * @param everyNthFrame - Only every nth frame is rendered.
 * @returns Total count of frames reported as rendered.
 */
export declare const getRenderedFramesProgress: ({ contents, renderId, framesPerLambda, everyNthFrame, frameRange, }: {
    contents: _Object[];
    renderId: string;
    framesPerLambda: number;
    frameRange: [number, number];
    everyNthFrame: number;
}) => number;
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"use strict";
// Computes how many frames have been rendered so far for a Lambda render by
// summing per-chunk progress derived from the S3 objects the render produced.
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRenderedFramesProgress = void 0;
const chunk_progress_1 = require("../../shared/chunk-progress");
const constants_1 = require("../../shared/constants");
const parse_chunk_key_1 = require("../../shared/parse-chunk-key");
const parse_lambda_initialized_key_1 = require("../../shared/parse-lambda-initialized-key");
const plan_frame_ranges_1 = require("../chunk-optimization/plan-frame-ranges");
/**
 * Sums rendered-frame progress across all chunks of a render.
 *
 * @param {Object} params
 * @param {import('@aws-sdk/client-s3')._Object[]} params.contents - S3 objects listed from the render bucket; `Key` and `ETag` may be absent per the SDK type.
 * @param {string} params.renderId - Render whose object keys are matched.
 * @param {number} params.framesPerLambda - Frames assigned to each Lambda chunk.
 * @param {[number, number]} params.frameRange - Inclusive overall frame range.
 * @param {number} params.everyNthFrame - Only every nth frame is rendered.
 * @returns {number} Total number of frames reported as rendered.
 */
const getRenderedFramesProgress = ({ contents, renderId, framesPerLambda, everyNthFrame, frameRange, }) => {
    // Progress per chunk index. Finished chunks (second loop) overwrite the
    // partial progress recorded for in-flight chunks (first loop).
    const chunkProgress = {};
    const { chunks } = (0, plan_frame_ranges_1.planFrameRanges)({
        everyNthFrame,
        frameRange,
        framesPerLambda,
    });
    // In-progress chunks: "lambda initialized" marker objects whose ETag encodes
    // the chunk's progress. Sort by key so that when the same chunk index
    // appears multiple times, only the latest attempt is used.
    // FIX: guard `c.Key` before `.startsWith` — `Key` is optional on the AWS
    // SDK `_Object` type; the second filter below already guarded it, this one
    // did not and would throw on a listing entry without a `Key`.
    const sortedChunks = contents
        .filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.lambdaChunkInitializedPrefix)(renderId)); })
        .sort((a, b) => {
        return a.Key.localeCompare(b.Key);
    });
    for (const chunk of sortedChunks) {
        const key = (0, parse_lambda_initialized_key_1.parseLambdaInitializedKey)(chunk.Key);
        chunkProgress[key.chunk] = (0, chunk_progress_1.getProgressOfChunk)(chunk.ETag);
    }
    // Finished chunks: an uploaded chunk object means every frame in that
    // chunk's planned range is done, so count the full inclusive range.
    for (const chunk of contents.filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.chunkKey)(renderId)); })) {
        const parsed = (0, parse_chunk_key_1.parseLambdaChunkKey)(chunk.Key);
        const frameRangeInChunk = chunks[parsed.chunk];
        if (!frameRangeInChunk) {
            // Defensive: a chunk object whose index is outside the planned
            // ranges (should not happen) must not crash progress reporting.
            continue;
        }
        chunkProgress[parsed.chunk] =
            frameRangeInChunk[1] - frameRangeInChunk[0] + 1;
    }
    // Total frames rendered = sum of per-chunk progress values.
    const framesRendered = Object.values(chunkProgress).reduce((a, b) => a + b, 0);
    return framesRendered;
};
exports.getRenderedFramesProgress = getRenderedFramesProgress;
|