@remotion/lambda 4.0.179 → 4.0.181

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dist/api/deploy-site.d.ts +3 -3
  2. package/dist/api/upload-dir.js +30 -31
  3. package/dist/cli/commands/still.js +7 -4
  4. package/dist/functions/helpers/cleanup-props.d.ts +1 -4
  5. package/dist/internals.d.ts +5 -5
  6. package/dist/shared/is-flaky-error.js +1 -0
  7. package/package.json +9 -9
  8. package/remotionlambda-arm64.zip +0 -0
  9. package/dist/functions/helpers/check-if-render-exists.d.ts +0 -3
  10. package/dist/functions/helpers/check-if-render-exists.js +0 -15
  11. package/dist/functions/helpers/delete-chunks.d.ts +0 -9
  12. package/dist/functions/helpers/delete-chunks.js +0 -25
  13. package/dist/functions/helpers/get-cleanup-progress.d.ts +0 -10
  14. package/dist/functions/helpers/get-cleanup-progress.js +0 -35
  15. package/dist/functions/helpers/get-encoding-metadata.d.ts +0 -7
  16. package/dist/functions/helpers/get-encoding-metadata.js +0 -15
  17. package/dist/functions/helpers/get-encoding-progress-step-size.d.ts +0 -1
  18. package/dist/functions/helpers/get-encoding-progress-step-size.js +0 -7
  19. package/dist/functions/helpers/get-files-to-delete.d.ts +0 -10
  20. package/dist/functions/helpers/get-files-to-delete.js +0 -52
  21. package/dist/functions/helpers/get-final-encoding-status.d.ts +0 -6
  22. package/dist/functions/helpers/get-final-encoding-status.js +0 -18
  23. package/dist/functions/helpers/get-folder-size.d.ts +0 -1
  24. package/dist/functions/helpers/get-folder-size.js +0 -8
  25. package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts +0 -8
  26. package/dist/functions/helpers/get-lambdas-invoked-stats.js +0 -14
  27. package/dist/functions/helpers/get-post-render-data.d.ts +0 -8
  28. package/dist/functions/helpers/get-post-render-data.js +0 -22
  29. package/dist/functions/helpers/get-render-metadata.d.ts +0 -8
  30. package/dist/functions/helpers/get-render-metadata.js +0 -17
  31. package/dist/functions/helpers/get-rendered-frames-progress.d.ts +0 -8
  32. package/dist/functions/helpers/get-rendered-frames-progress.js +0 -37
  33. package/dist/functions/helpers/get-time-to-finish.d.ts +0 -5
  34. package/dist/functions/helpers/get-time-to-finish.js +0 -13
  35. package/dist/functions/helpers/streaming-payloads.d.ts +0 -19
  36. package/dist/functions/helpers/streaming-payloads.js +0 -25
  37. package/dist/functions/helpers/write-post-render-data.d.ts +0 -9
  38. package/dist/functions/helpers/write-post-render-data.js +0 -18
  39. package/dist/functions/merge.d.ts +0 -9
  40. package/dist/functions/merge.js +0 -61
  41. package/dist/shared/chunk-progress.d.ts +0 -9
  42. package/dist/shared/chunk-progress.js +0 -2034
  43. package/dist/shared/parse-chunk-key.d.ts +0 -5
  44. package/dist/shared/parse-chunk-key.js +0 -15
  45. package/dist/shared/parse-lambda-initialized-key.d.ts +0 -5
  46. package/dist/shared/parse-lambda-initialized-key.js +0 -15
  47. package/dist/shared/serialize-props.d.ts +0 -14
  48. package/dist/shared/serialize-props.js +0 -36
package/dist/api/deploy-site.d.ts CHANGED
@@ -59,11 +59,11 @@ export declare const internalDeploySite: (args_0: MandatoryParameters & {
  getValue: ({ commandLine }: {
  commandLine: Record<string, unknown>;
  }) => {
- value: "verbose" | "info" | "warn" | "error";
+ value: "error" | "verbose" | "info" | "warn";
  source: string;
  };
- setConfig: (newLogLevel: "verbose" | "info" | "warn" | "error") => void;
- type: "verbose" | "info" | "warn" | "error";
+ setConfig: (newLogLevel: "error" | "verbose" | "info" | "warn") => void;
+ type: "error" | "verbose" | "info" | "warn";
  };
  readonly throwIfSiteExists: {
  cliFlag: string;
package/dist/api/upload-dir.js CHANGED
@@ -9,8 +9,8 @@ const mime_types_1 = __importDefault(require("mime-types"));
  const node_fs_1 = require("node:fs");
  const node_path_1 = __importDefault(require("node:path"));
  const aws_clients_1 = require("../shared/aws-clients");
- const chunk_1 = require("../shared/chunk");
  const make_s3_key_1 = require("../shared/make-s3-key");
+ const p_limit_1 = require("../shared/p-limit");
  const getDirFiles = (entry) => {
  throw new TypeError('should only be executed in test ' + JSON.stringify(entry));
  };
@@ -45,6 +45,7 @@ async function getFiles(directory, originalDirectory, toUpload) {
  }));
  return _files.flat(1);
  }
+ const limit = (0, p_limit_1.pLimit)(50);
  const uploadDir = async ({ bucket, region, localDir, onProgress, keyPrefix, privacy, toUpload, }) => {
  const files = await getFiles(localDir, localDir, toUpload);
  const progresses = {};
@@ -52,38 +53,36 @@ const uploadDir = async ({ bucket, region, localDir, onProgress, keyPrefix, priv
  progresses[file.name] = 0;
  }
  const client = (0, aws_clients_1.getS3Client)(region, null);
- const chunkedFiles = (0, chunk_1.chunk)(files, 200);
  const uploadAll = (async () => {
- for (const filesChunk of chunkedFiles) {
- const uploads = filesChunk.map((filePath) => {
- const Key = (0, make_s3_key_1.makeS3Key)(keyPrefix, localDir, filePath.name);
- const Body = (0, node_fs_1.createReadStream)(filePath.name);
- const ContentType = mime_types_1.default.lookup(Key) || 'application/octet-stream';
- const ACL = privacy === 'no-acl'
- ? undefined
- : privacy === 'private'
- ? 'private'
- : 'public-read';
- const paralellUploads3 = new lib_storage_1.Upload({
- client,
- queueSize: 4,
- partSize: 5 * 1024 * 1024,
- params: {
- Key,
- Bucket: bucket,
- Body,
- ACL,
- ContentType,
- },
- });
- paralellUploads3.on('httpUploadProgress', (progress) => {
- var _a;
- progresses[filePath.name] = (_a = progress.loaded) !== null && _a !== void 0 ? _a : 0;
- });
- return paralellUploads3.done();
+ const uploads = files.map((filePath) => limit(async () => {
+ const Key = (0, make_s3_key_1.makeS3Key)(keyPrefix, localDir, filePath.name);
+ const Body = (0, node_fs_1.createReadStream)(filePath.name);
+ const ContentType = mime_types_1.default.lookup(Key) || 'application/octet-stream';
+ const ACL = privacy === 'no-acl'
+ ? undefined
+ : privacy === 'private'
+ ? 'private'
+ : 'public-read';
+ const paralellUploads3 = new lib_storage_1.Upload({
+ client,
+ queueSize: 4,
+ partSize: 5 * 1024 * 1024,
+ params: {
+ Key,
+ Bucket: bucket,
+ Body,
+ ACL,
+ ContentType,
+ },
  });
- await Promise.all(uploads);
- }
+ paralellUploads3.on('httpUploadProgress', (progress) => {
+ var _a;
+ progresses[filePath.name] = (_a = progress.loaded) !== null && _a !== void 0 ? _a : 0;
+ });
+ const prom = await paralellUploads3.done();
+ return prom;
+ }));
+ await Promise.all(uploads);
  })();
  const interval = setInterval(() => {
  onProgress({
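Note on the change above: 4.0.181 drops the shared/chunk helper that uploaded files in batches of 200 (each batch blocked on its slowest file) in favor of a vendored p-limit, which keeps at most 50 uploads in flight and starts the next one as soon as a slot frees up. A minimal sketch of the pattern, using the public p-limit package rather than the vendored shared/p-limit module, with a hypothetical uploadOne() standing in for the per-file S3 Upload shown in the diff:

import pLimit from 'p-limit';

// At most 50 uploads run concurrently; the rest wait in the limiter's queue.
const limit = pLimit(50);

async function uploadAll(
  files: string[],
  uploadOne: (file: string) => Promise<void>, // stand-in for the Upload call in upload-dir.js
): Promise<void> {
  // All files are scheduled up front; finishing any upload immediately frees a
  // slot for the next, unlike the old batch-of-200 approach.
  await Promise.all(files.map((file) => limit(() => uploadOne(file))));
}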
package/dist/cli/commands/still.js CHANGED
@@ -174,10 +174,13 @@ const stillCommand = async (args, remotionRoot, logLevel) => {
  });
  log_1.Log.info({ indent: false, logLevel }, cli_1.CliInternals.chalk.gray(`Render ID: ${cli_1.CliInternals.makeHyperlink({ text: res.renderId, fallback: res.renderId, url: (0, get_aws_urls_1.getS3RenderUrl)({ bucketName: res.bucketName, renderId: res.renderId, region: (0, get_aws_region_1.getAwsRegion)() }) })}`));
  log_1.Log.info({ indent: false, logLevel }, cli_1.CliInternals.chalk.gray(`Bucket: ${cli_1.CliInternals.makeHyperlink({ text: res.bucketName, fallback: res.bucketName, url: `https://${(0, get_aws_region_1.getAwsRegion)()}.console.aws.amazon.com/s3/buckets/${res.bucketName}/?region=${(0, get_aws_region_1.getAwsRegion)()}` })}`));
- log_1.Log.info({
- indent: false,
- logLevel,
- }, (0, progress_1.makeArtifactProgress)(res.artifacts));
+ const artifactProgress = (0, progress_1.makeArtifactProgress)(res.artifacts);
+ if (artifactProgress) {
+ log_1.Log.info({
+ indent: false,
+ logLevel,
+ }, (0, progress_1.makeArtifactProgress)(res.artifacts));
+ }
  log_1.Log.info({ indent: false, logLevel }, chalk.blue('+ S3'.padEnd(cli_1.CliInternals.LABEL_WIDTH)), chalk.blue(cli_1.CliInternals.makeHyperlink({
  fallback: res.url,
  url: res.url,
package/dist/functions/helpers/cleanup-props.d.ts CHANGED
@@ -2,7 +2,4 @@ import type { SerializedInputProps } from '../../defaults';
  export declare const cleanupProps: ({ serializedResolvedProps, inputProps, }: {
  serializedResolvedProps: SerializedInputProps;
  inputProps: SerializedInputProps;
- }) => Promise<[
- number,
- number
- ]>;
+ }) => Promise<[number, number]>;
package/dist/internals.d.ts CHANGED
@@ -1,5 +1,5 @@
  export declare const LambdaInternals: {
- executeCommand: (args: string[], remotionRoot: string, logLevel: "verbose" | "info" | "warn" | "error") => Promise<void>;
+ executeCommand: (args: string[], remotionRoot: string, logLevel: "error" | "verbose" | "info" | "warn") => Promise<void>;
  makeLambdaRenderMediaPayload: ({ rendererFunctionName, frameRange, framesPerLambda, forceBucketName: bucketName, codec, composition, serveUrl, imageFormat, inputProps, region, crf, envVariables, pixelFormat, proResProfile, x264Preset, maxRetries, privacy, logLevel, outName, timeoutInMilliseconds, chromiumOptions, scale, everyNthFrame, numberOfGifLoops, audioBitrate, concurrencyPerLambda, audioCodec, forceHeight, forceWidth, webhook, videoBitrate, encodingMaxRate, encodingBufferSize, downloadBehavior, muted, overwrite, jpegQuality, offthreadVideoCacheSizeInBytes, deleteAfter, colorSpace, preferLossless, }: import("./api/make-lambda-payload").InnerRenderMediaOnLambdaInput) => Promise<import("./defaults").LambdaStartPayload>;
  getRenderProgressPayload: ({ bucketName, renderId, s3OutputProvider, logLevel, }: import("./client").GetRenderProgressInput) => import("./defaults").LambdaStatusPayload;
  makeLambdaRenderStillPayload: ({ serveUrl, inputProps, imageFormat, envVariables, quality, jpegQuality, region, maxRetries, composition, privacy, frame, logLevel, outName, timeoutInMilliseconds, chromiumOptions, scale, downloadBehavior, forceHeight, forceWidth, forceBucketName, offthreadVideoCacheSizeInBytes, deleteAfter, }: import("./api/render-still-on-lambda").RenderStillOnLambdaNonNullInput) => Promise<{
@@ -14,7 +14,7 @@ export declare const LambdaInternals: {
  maxRetries: number;
  frame: number;
  privacy: import("./defaults").Privacy;
- logLevel: "verbose" | "info" | "warn" | "error";
+ logLevel: "error" | "verbose" | "info" | "warn";
  outName: import("./defaults").OutNameInput | null;
  timeoutInMilliseconds: number;
  chromiumOptions: import("@remotion/renderer").ChromiumOptions;
@@ -57,11 +57,11 @@ export declare const LambdaInternals: {
  getValue: ({ commandLine }: {
  commandLine: Record<string, unknown>;
  }) => {
- value: "verbose" | "info" | "warn" | "error";
+ value: "error" | "verbose" | "info" | "warn";
  source: string;
  };
- setConfig: (newLogLevel: "verbose" | "info" | "warn" | "error") => void;
- type: "verbose" | "info" | "warn" | "error";
+ setConfig: (newLogLevel: "error" | "verbose" | "info" | "warn") => void;
+ type: "error" | "verbose" | "info" | "warn";
  };
  readonly throwIfSiteExists: {
  cliFlag: string;
package/dist/shared/is-flaky-error.js CHANGED
@@ -56,6 +56,7 @@ const isFlakyError = (err) => {
  // Internet flakiness
  if (message.includes('getaddrinfo') ||
  message.includes('ECONNRESET') ||
+ message.includes('ERR_CONNECTION_TIMED_OUT') ||
  message.includes('socket hang up')) {
  return true;
  }
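For context, isFlakyError is a substring check on error messages that decides whether a failed operation counts as transient; 4.0.181 adds ERR_CONNECTION_TIMED_OUT to the network-flakiness branch. A sketch of how such a predicate is typically consumed — the retry wrapper below is illustrative only and not part of this diff, and the predicate mirrors only the branch shown above:

const isLikelyFlakyNetworkError = (err: Error): boolean => {
  const message = err.message ?? '';
  return (
    message.includes('getaddrinfo') ||
    message.includes('ECONNRESET') ||
    message.includes('ERR_CONNECTION_TIMED_OUT') || // newly matched in 4.0.181
    message.includes('socket hang up')
  );
};

// Hypothetical retry wrapper: only transient network failures are retried.
async function withRetries<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn();
    } catch (err) {
      if (attempt >= attempts - 1 || !isLikelyFlakyNetworkError(err as Error)) {
        throw err; // non-flaky errors (or exhausted attempts) surface immediately
      }
    }
  }
}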
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/lambda"
  },
  "name": "@remotion/lambda",
- "version": "4.0.179",
+ "version": "4.0.181",
  "description": "Render Remotion videos on AWS Lambda",
  "main": "dist/index.js",
  "sideEffects": false,
@@ -22,11 +22,11 @@
  "@aws-sdk/s3-request-presigner": "3.583.0",
  "mime-types": "2.1.34",
  "zod": "3.22.3",
- "@remotion/renderer": "4.0.179",
- "@remotion/bundler": "4.0.179",
- "@remotion/cli": "4.0.179",
- "remotion": "4.0.179",
- "@remotion/streaming": "4.0.179"
+ "@remotion/bundler": "4.0.181",
+ "@remotion/cli": "4.0.181",
+ "@remotion/renderer": "4.0.181",
+ "remotion": "4.0.181",
+ "@remotion/streaming": "4.0.181"
  },
  "devDependencies": {
  "@types/mime-types": "2.1.1",
@@ -35,11 +35,11 @@
  "pureimage": "0.4.13",
  "vitest": "0.31.1",
  "zip-lib": "^0.7.2",
- "@remotion/bundler": "4.0.179",
- "@remotion/compositor-linux-arm64-gnu": "4.0.179"
+ "@remotion/bundler": "4.0.181",
+ "@remotion/compositor-linux-arm64-gnu": "4.0.181"
  },
  "peerDependencies": {
- "@remotion/bundler": "4.0.179"
+ "@remotion/bundler": "4.0.181"
  },
  "publishConfig": {
  "access": "public"
package/remotionlambda-arm64.zip CHANGED
Binary file
package/dist/functions/helpers/check-if-render-exists.d.ts DELETED
@@ -1,3 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { AwsRegion } from '../../client';
- export declare const checkIfRenderExists: (contents: _Object[], renderId: string, bucketName: string, region: AwsRegion) => void;
package/dist/functions/helpers/check-if-render-exists.js DELETED
@@ -1,15 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.checkIfRenderExists = void 0;
- const constants_1 = require("../../shared/constants");
- const checkIfRenderExists = (contents, renderId, bucketName, region) => {
- const initializedExists = Boolean(contents.find((c) => {
- var _a;
- return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.initalizedMetadataKey)(renderId));
- }));
- if (!initializedExists) {
- // ! Error message is checked in progress handler and will be retried. Make sure to update
- throw new TypeError(`No render with ID "${renderId}" found in bucket ${bucketName} and region ${region}`);
- }
- };
- exports.checkIfRenderExists = checkIfRenderExists;
package/dist/functions/helpers/delete-chunks.d.ts DELETED
@@ -1,9 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { AwsRegion } from '../../pricing/aws-regions';
- import type { CleanupJob } from './get-files-to-delete';
- export declare const cleanupFiles: ({ bucket, region, contents, jobs, }: {
- bucket: string;
- region: AwsRegion;
- contents: _Object[];
- jobs: CleanupJob[];
- }) => Promise<number>;
package/dist/functions/helpers/delete-chunks.js DELETED
@@ -1,25 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.cleanupFiles = void 0;
- const clean_items_1 = require("../../api/clean-items");
- const cleanupFiles = async ({ bucket, region, contents, jobs, }) => {
- const start = Date.now();
- await (0, clean_items_1.cleanItems)({
- bucket,
- region,
- list: jobs.map((item) => {
- var _a;
- if (item.type === 'exact') {
- return item.name;
- }
- if (item.type === 'prefix') {
- return (_a = contents.find((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith(item.name); })) === null || _a === void 0 ? void 0 : _a.Key;
- }
- throw new Error('unexpected in deleteChunks()');
- }),
- onAfterItemDeleted: () => undefined,
- onBeforeItemDeleted: () => undefined,
- });
- return Date.now() - start;
- };
- exports.cleanupFiles = cleanupFiles;
package/dist/functions/helpers/get-cleanup-progress.d.ts DELETED
@@ -1,10 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { CleanupInfo } from '../../shared/constants';
- export declare const getCleanupProgress: ({ contents, output, chunkCount, renderId, hasAudio, hasVideo, }: {
- contents: _Object[];
- output: string | null;
- chunkCount: number;
- renderId: string;
- hasAudio: boolean;
- hasVideo: boolean;
- }) => null | CleanupInfo;
package/dist/functions/helpers/get-cleanup-progress.js DELETED
@@ -1,35 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getCleanupProgress = void 0;
- const get_files_to_delete_1 = require("./get-files-to-delete");
- const getCleanupProgress = ({ contents, output, chunkCount, renderId, hasAudio, hasVideo, }) => {
- if (output === null) {
- return null;
- }
- const filesToDelete = (0, get_files_to_delete_1.getFilesToDelete)({
- chunkCount,
- renderId,
- hasAudio,
- hasVideo,
- });
- const filesStillThere = contents.filter((c) => {
- return filesToDelete.find((f) => {
- var _a;
- if (f.type === 'exact') {
- return f.name === c.Key;
- }
- if (f.type === 'prefix') {
- return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith(f.name);
- }
- throw new Error('Unexpected in getCleanupProgress');
- });
- });
- const filesDeleted = Math.max(0, filesToDelete.length - filesStillThere.length);
- return {
- minFilesToDelete: filesToDelete.length,
- filesDeleted,
- // We don't know. Only if post render data is saved, we know the timing
- doneIn: null,
- };
- };
- exports.getCleanupProgress = getCleanupProgress;
package/dist/functions/helpers/get-encoding-metadata.d.ts DELETED
@@ -1,7 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { EncodingProgress } from '../../defaults';
- export declare const getEncodingMetadata: ({ exists, frameCount, stepSize, }: {
- exists: _Object | undefined;
- frameCount: number;
- stepSize: number;
- }) => EncodingProgress | null;
package/dist/functions/helpers/get-encoding-metadata.js DELETED
@@ -1,15 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getEncodingMetadata = void 0;
- const chunk_progress_1 = require("../../shared/chunk-progress");
- const getEncodingMetadata = ({ exists, frameCount, stepSize, }) => {
- if (!exists) {
- return null;
- }
- const framesEncoded = (0, chunk_progress_1.getProgressOfChunk)(exists.ETag);
- // We only report every 100 frames encoded so that we are able to report up to 2000 * 100 ETags => 200000 frames
- return {
- framesEncoded: Math.min(frameCount, framesEncoded * stepSize),
- };
- };
- exports.getEncodingMetadata = getEncodingMetadata;
package/dist/functions/helpers/get-encoding-progress-step-size.d.ts DELETED
@@ -1 +0,0 @@
- export declare const getEncodingProgressStepSize: (totalFrames: number) => number;
package/dist/functions/helpers/get-encoding-progress-step-size.js DELETED
@@ -1,7 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getEncodingProgressStepSize = void 0;
- const getEncodingProgressStepSize = (totalFrames) => {
- return Math.min(100, Math.max(5, totalFrames / 10));
- };
- exports.getEncodingProgressStepSize = getEncodingProgressStepSize;
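The deleted helper above clamped the encoding-progress reporting step to between 5 and 100 frames. A few worked values of the formula, shown only to make the removed behavior concrete:

const stepSize = (totalFrames: number) => Math.min(100, Math.max(5, totalFrames / 10));

stepSize(30);   // 3 is below the floor, so the result is 5
stepSize(400);  // 40 falls inside [5, 100] and is used as-is
stepSize(5000); // 500 exceeds the cap, so the result is 100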
package/dist/functions/helpers/get-files-to-delete.d.ts DELETED
@@ -1,10 +0,0 @@
- export type CleanupJob = {
- name: string;
- type: 'exact' | 'prefix';
- };
- export declare const getFilesToDelete: ({ chunkCount, renderId, hasVideo, hasAudio, }: {
- chunkCount: number;
- renderId: string;
- hasVideo: boolean;
- hasAudio: boolean;
- }) => CleanupJob[];
package/dist/functions/helpers/get-files-to-delete.js DELETED
@@ -1,52 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getFilesToDelete = void 0;
- const constants_1 = require("../../shared/constants");
- const getFilesToDelete = ({ chunkCount, renderId, hasVideo, hasAudio, }) => {
- const videoChunks = hasVideo
- ? new Array(chunkCount).fill(true).map((_x, i) => (0, constants_1.chunkKeyForIndex)({
- index: i,
- renderId,
- type: 'video',
- }))
- : [];
- const audioChunks = hasAudio
- ? new Array(chunkCount).fill(true).map((_x, i) => (0, constants_1.chunkKeyForIndex)({
- index: i,
- renderId,
- type: 'audio',
- }))
- : [];
- const lambdaTimings = new Array(chunkCount)
- .fill(true)
- .map((_x, i) => (0, constants_1.lambdaTimingsPrefixForChunk)(renderId, i));
- return [
- {
- name: (0, constants_1.lambdaChunkInitializedPrefix)(renderId),
- type: 'prefix',
- },
- ...videoChunks.map((i) => {
- return {
- name: i,
- type: 'exact',
- };
- }),
- ...audioChunks.map((i) => {
- return {
- name: i,
- type: 'exact',
- };
- }),
- ...lambdaTimings.map((i) => {
- return {
- name: i,
- type: 'prefix',
- };
- }),
- {
- name: (0, constants_1.encodingProgressKey)(renderId),
- type: 'exact',
- },
- ];
- };
- exports.getFilesToDelete = getFilesToDelete;
package/dist/functions/helpers/get-final-encoding-status.d.ts DELETED
@@ -1,6 +0,0 @@
- import type { EncodingProgress, RenderMetadata } from '../../shared/constants';
- export declare const getFinalEncodingStatus: ({ encodingProgress, renderMetadata, outputFileExists, }: {
- encodingProgress: EncodingProgress | null;
- renderMetadata: RenderMetadata | null;
- outputFileExists: boolean;
- }) => EncodingProgress | null;
package/dist/functions/helpers/get-final-encoding-status.js DELETED
@@ -1,18 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getFinalEncodingStatus = void 0;
- const getFinalEncodingStatus = ({ encodingProgress, renderMetadata, outputFileExists, }) => {
- if (!renderMetadata) {
- return null;
- }
- if (encodingProgress) {
- return encodingProgress;
- }
- if (outputFileExists) {
- return {
- framesEncoded: renderMetadata.videoConfig.durationInFrames,
- };
- }
- return null;
- };
- exports.getFinalEncodingStatus = getFinalEncodingStatus;
package/dist/functions/helpers/get-folder-size.d.ts DELETED
@@ -1 +0,0 @@
- export declare function getFolderSizeRecursively(folder: string): number;
package/dist/functions/helpers/get-folder-size.js DELETED
@@ -1,8 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getFolderSizeRecursively = void 0;
- const get_files_in_folder_1 = require("./get-files-in-folder");
- function getFolderSizeRecursively(folder) {
- return (0, get_files_in_folder_1.getFolderFiles)(folder).reduce((a, b) => a + b.size, 0);
- }
- exports.getFolderSizeRecursively = getFolderSizeRecursively;
package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts DELETED
@@ -1,8 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- export type LambdaInvokeStats = {
- lambdasInvoked: number;
- };
- export declare const getLambdasInvokedStats: ({ contents, renderId, }: {
- contents: _Object[];
- renderId: string;
- }) => LambdaInvokeStats;
package/dist/functions/helpers/get-lambdas-invoked-stats.js DELETED
@@ -1,14 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getLambdasInvokedStats = void 0;
- const constants_1 = require("../../shared/constants");
- const parse_lambda_initialized_key_1 = require("../../shared/parse-lambda-initialized-key");
- const getLambdasInvokedStats = ({ contents, renderId, }) => {
- const lambdasInvoked = contents
- .filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.lambdaChunkInitializedPrefix)(renderId)); })
- .filter((c) => (0, parse_lambda_initialized_key_1.parseLambdaInitializedKey)(c.Key).attempt === 1);
- return {
- lambdasInvoked: lambdasInvoked.length,
- };
- };
- exports.getLambdasInvokedStats = getLambdasInvokedStats;
package/dist/functions/helpers/get-post-render-data.d.ts DELETED
@@ -1,8 +0,0 @@
- import type { AwsRegion } from '../../pricing/aws-regions';
- import type { PostRenderData } from '../../shared/constants';
- export declare const getPostRenderData: ({ bucketName, renderId, region, expectedBucketOwner, }: {
- bucketName: string;
- renderId: string;
- region: AwsRegion;
- expectedBucketOwner: string;
- }) => Promise<PostRenderData | null>;
package/dist/functions/helpers/get-post-render-data.js DELETED
@@ -1,22 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getPostRenderData = void 0;
- const constants_1 = require("../../shared/constants");
- const stream_to_string_1 = require("../../shared/stream-to-string");
- const io_1 = require("./io");
- const getPostRenderData = async ({ bucketName, renderId, region, expectedBucketOwner, }) => {
- try {
- const data = await (0, io_1.lambdaReadFile)({
- bucketName,
- key: (0, constants_1.postRenderDataKey)(renderId),
- region,
- expectedBucketOwner,
- });
- return JSON.parse(await (0, stream_to_string_1.streamToString)(data));
- }
- catch (err) {
- // Does not exist
- return null;
- }
- };
- exports.getPostRenderData = getPostRenderData;
package/dist/functions/helpers/get-render-metadata.d.ts DELETED
@@ -1,8 +0,0 @@
- import type { AwsRegion } from '../../pricing/aws-regions';
- import type { RenderMetadata } from '../../shared/constants';
- export declare const getRenderMetadata: ({ bucketName, renderId, region, expectedBucketOwner, }: {
- bucketName: string;
- renderId: string;
- region: AwsRegion;
- expectedBucketOwner: string;
- }) => Promise<RenderMetadata>;
package/dist/functions/helpers/get-render-metadata.js DELETED
@@ -1,17 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getRenderMetadata = void 0;
- const constants_1 = require("../../shared/constants");
- const stream_to_string_1 = require("../../shared/stream-to-string");
- const io_1 = require("./io");
- const getRenderMetadata = async ({ bucketName, renderId, region, expectedBucketOwner, }) => {
- const Body = await (0, io_1.lambdaReadFile)({
- bucketName,
- key: (0, constants_1.renderMetadataKey)(renderId),
- region,
- expectedBucketOwner,
- });
- const renderMetadataResponse = JSON.parse(await (0, stream_to_string_1.streamToString)(Body));
- return renderMetadataResponse;
- };
- exports.getRenderMetadata = getRenderMetadata;
package/dist/functions/helpers/get-rendered-frames-progress.d.ts DELETED
@@ -1,8 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- export declare const getRenderedFramesProgress: ({ contents, renderId, framesPerLambda, everyNthFrame, frameRange, }: {
- contents: _Object[];
- renderId: string;
- framesPerLambda: number;
- frameRange: [number, number];
- everyNthFrame: number;
- }) => number;
package/dist/functions/helpers/get-rendered-frames-progress.js DELETED
@@ -1,37 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getRenderedFramesProgress = void 0;
- const chunk_progress_1 = require("../../shared/chunk-progress");
- const constants_1 = require("../../shared/constants");
- const parse_chunk_key_1 = require("../../shared/parse-chunk-key");
- const parse_lambda_initialized_key_1 = require("../../shared/parse-lambda-initialized-key");
- const plan_frame_ranges_1 = require("../chunk-optimization/plan-frame-ranges");
- const getRenderedFramesProgress = ({ contents, renderId, framesPerLambda, everyNthFrame, frameRange, }) => {
- const chunkProgress = {};
- const { chunks } = (0, plan_frame_ranges_1.planFrameRanges)({
- everyNthFrame,
- frameRange,
- framesPerLambda,
- });
- // Sort, so only the latest attempt is used
- const sortedChunks = contents
- .filter((c) => {
- return c.Key.startsWith((0, constants_1.lambdaChunkInitializedPrefix)(renderId));
- })
- .sort((a, b) => {
- return a.Key.localeCompare(b.Key);
- });
- for (const chunk of sortedChunks) {
- const key = (0, parse_lambda_initialized_key_1.parseLambdaInitializedKey)(chunk.Key);
- chunkProgress[key.chunk] = (0, chunk_progress_1.getProgressOfChunk)(chunk.ETag);
- }
- for (const chunk of contents.filter((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.chunkKey)(renderId)); })) {
- const parsed = (0, parse_chunk_key_1.parseLambdaChunkKey)(chunk.Key);
- const frameRangeInChunk = chunks[parsed.chunk];
- chunkProgress[parsed.chunk] =
- frameRangeInChunk[1] - frameRangeInChunk[0] + 1;
- }
- const framesRendered = Object.values(chunkProgress).reduce((a, b) => a + b, 0);
- return framesRendered;
- };
- exports.getRenderedFramesProgress = getRenderedFramesProgress;
package/dist/functions/helpers/get-time-to-finish.d.ts DELETED
@@ -1,5 +0,0 @@
- import type { RenderMetadata } from '../../defaults';
- export declare const getTimeToFinish: ({ renderMetadata, lastModified, }: {
- renderMetadata: RenderMetadata | null;
- lastModified: number | null;
- }) => number | null;
package/dist/functions/helpers/get-time-to-finish.js DELETED
@@ -1,13 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getTimeToFinish = void 0;
- const getTimeToFinish = ({ renderMetadata, lastModified, }) => {
- if (!lastModified) {
- return null;
- }
- if (!renderMetadata) {
- return null;
- }
- return Math.max(0, lastModified - renderMetadata.startedDate);
- };
- exports.getTimeToFinish = getTimeToFinish;