@remotion/lambda 4.0.164 → 4.0.166

This diff shows the contents of publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those published versions.
Files changed (87)
  1. package/dist/api/delete-render.js +7 -3
  2. package/dist/api/deploy-function.d.ts +0 -1
  3. package/dist/api/deploy-function.js +3 -3
  4. package/dist/api/download-media.js +6 -3
  5. package/dist/api/get-compositions-on-lambda.js +0 -2
  6. package/dist/api/get-render-progress.js +0 -2
  7. package/dist/api/make-lambda-payload.js +2 -1
  8. package/dist/api/render-media-on-lambda.js +0 -2
  9. package/dist/api/render-still-on-lambda.js +37 -25
  10. package/dist/cli/commands/render/progress.d.ts +3 -28
  11. package/dist/cli/commands/render/progress.js +89 -90
  12. package/dist/cli/commands/render/render.js +7 -40
  13. package/dist/cli/commands/sites/create.js +2 -2
  14. package/dist/functions/chunk-optimization/plan-frame-ranges.d.ts +4 -1
  15. package/dist/functions/chunk-optimization/types.d.ts +0 -3
  16. package/dist/functions/helpers/calculate-chunk-times.d.ts +3 -4
  17. package/dist/functions/helpers/calculate-chunk-times.js +4 -8
  18. package/dist/functions/helpers/calculate-price-from-bucket.d.ts +3 -5
  19. package/dist/functions/helpers/calculate-price-from-bucket.js +5 -18
  20. package/dist/functions/helpers/check-if-render-exists.js +1 -1
  21. package/dist/functions/helpers/clean-tmpdir.d.ts +0 -2
  22. package/dist/functions/helpers/clean-tmpdir.js +1 -7
  23. package/dist/functions/helpers/concat-videos.d.ts +1 -13
  24. package/dist/functions/helpers/concat-videos.js +7 -131
  25. package/dist/functions/helpers/create-post-render-data.d.ts +6 -6
  26. package/dist/functions/helpers/create-post-render-data.js +17 -37
  27. package/dist/functions/helpers/find-output-file-in-bucket.d.ts +1 -3
  28. package/dist/functions/helpers/find-output-file-in-bucket.js +1 -4
  29. package/dist/functions/helpers/get-cleanup-progress.d.ts +1 -3
  30. package/dist/functions/helpers/get-cleanup-progress.js +1 -3
  31. package/dist/functions/helpers/get-custom-out-name.js +1 -0
  32. package/dist/functions/helpers/get-files-to-delete.d.ts +1 -3
  33. package/dist/functions/helpers/get-files-to-delete.js +1 -35
  34. package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts +0 -5
  35. package/dist/functions/helpers/get-lambdas-invoked-stats.js +0 -12
  36. package/dist/functions/helpers/get-overall-progress-s3.d.ts +4 -2
  37. package/dist/functions/helpers/get-overall-progress-s3.js +8 -6
  38. package/dist/functions/helpers/get-overall-progress.d.ts +1 -2
  39. package/dist/functions/helpers/get-overall-progress.js +2 -4
  40. package/dist/functions/helpers/get-progress.js +78 -154
  41. package/dist/functions/helpers/get-retry-stats.d.ts +0 -5
  42. package/dist/functions/helpers/get-retry-stats.js +0 -18
  43. package/dist/functions/helpers/inspect-errors.d.ts +4 -10
  44. package/dist/functions/helpers/inspect-errors.js +5 -27
  45. package/dist/functions/helpers/io.d.ts +0 -1
  46. package/dist/functions/helpers/io.js +2 -3
  47. package/dist/functions/helpers/lifecycle.d.ts +0 -4
  48. package/dist/functions/helpers/lifecycle.js +2 -3
  49. package/dist/functions/helpers/make-timeout-error.d.ts +0 -2
  50. package/dist/functions/helpers/merge-chunks.d.ts +5 -0
  51. package/dist/functions/helpers/merge-chunks.js +16 -143
  52. package/dist/functions/helpers/min-max.d.ts +1 -1
  53. package/dist/functions/helpers/min-max.js +1 -1
  54. package/dist/functions/helpers/overall-render-progress.d.ts +26 -4
  55. package/dist/functions/helpers/overall-render-progress.js +116 -27
  56. package/dist/functions/helpers/stream-renderer.js +15 -9
  57. package/dist/functions/helpers/streamify-response.d.ts +0 -3
  58. package/dist/functions/helpers/streamify-response.js +2 -14
  59. package/dist/functions/helpers/write-lambda-error.d.ts +3 -6
  60. package/dist/functions/helpers/write-lambda-error.js +1 -21
  61. package/dist/functions/index.d.ts +5 -0
  62. package/dist/functions/index.js +70 -38
  63. package/dist/functions/launch.js +103 -133
  64. package/dist/functions/renderer.d.ts +2 -2
  65. package/dist/functions/renderer.js +70 -123
  66. package/dist/functions/start.d.ts +1 -0
  67. package/dist/functions/start.js +3 -2
  68. package/dist/functions/still.d.ts +7 -2
  69. package/dist/functions/still.js +17 -34
  70. package/dist/functions/streaming/streaming.d.ts +15 -2
  71. package/dist/functions/streaming/streaming.js +8 -6
  72. package/dist/internals.d.ts +2 -1
  73. package/dist/shared/aws-clients.js +14 -1
  74. package/dist/shared/call-lambda.d.ts +5 -4
  75. package/dist/shared/call-lambda.js +53 -44
  76. package/dist/shared/chunk-progress.d.ts +1 -2
  77. package/dist/shared/chunk-progress.js +2 -2
  78. package/dist/shared/constants.d.ts +14 -35
  79. package/dist/shared/constants.js +3 -31
  80. package/dist/shared/content-disposition-header.d.ts +0 -3
  81. package/dist/shared/get-function-version.js +0 -2
  82. package/dist/shared/is-flaky-error.js +4 -0
  83. package/dist/shared/parse-lambda-timings-key.d.ts +0 -2
  84. package/dist/shared/parse-lambda-timings-key.js +0 -14
  85. package/dist/shared/return-values.d.ts +0 -5
  86. package/package.json +18 -18
  87. package/remotionlambda-arm64.zip +0 -0
package/dist/functions/helpers/merge-chunks.js

@@ -4,108 +4,31 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.mergeChunksAndFinishRender = void 0;
- const renderer_1 = require("@remotion/renderer");
  const fs_1 = __importDefault(require("fs"));
- const node_fs_1 = require("node:fs");
- const node_path_1 = require("node:path");
  const cleanup_serialized_input_props_1 = require("../../shared/cleanup-serialized-input-props");
- const constants_1 = require("../../shared/constants");
- const truthy_1 = require("../../shared/truthy");
  const concat_videos_1 = require("./concat-videos");
  const create_post_render_data_1 = require("./create-post-render-data");
- const delete_chunks_1 = require("./delete-chunks");
  const get_current_region_1 = require("./get-current-region");
- const get_encoding_progress_step_size_1 = require("./get-encoding-progress-step-size");
- const get_files_to_delete_1 = require("./get-files-to-delete");
  const get_output_url_from_metadata_1 = require("./get-output-url-from-metadata");
  const inspect_errors_1 = require("./inspect-errors");
  const io_1 = require("./io");
- const render_has_audio_video_1 = require("./render-has-audio-video");
  const timer_1 = require("./timer");
- const write_lambda_error_1 = require("./write-lambda-error");
- const write_post_render_data_1 = require("./write-post-render-data");
  const mergeChunksAndFinishRender = async (options) => {
- let lastProgressUploaded = Date.now();
  const onProgress = (framesEncoded) => {
- const deltaSinceLastProgressUploaded = Date.now() - lastProgressUploaded;
- if (deltaSinceLastProgressUploaded < 1500 &&
- framesEncoded !== options.numberOfFrames) {
- return;
- }
- lastProgressUploaded = Date.now();
- (0, io_1.lambdaWriteFile)({
- bucketName: options.bucketName,
- key: (0, constants_1.encodingProgressKey)(options.renderId),
- body: String(Math.round(framesEncoded / (0, get_encoding_progress_step_size_1.getEncodingProgressStepSize)(options.numberOfFrames))),
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- privacy: 'private',
- expectedBucketOwner: options.expectedBucketOwner,
- downloadBehavior: null,
- customCredentials: null,
- }).catch((err) => {
- (0, write_lambda_error_1.writeLambdaError)({
- bucketName: options.bucketName,
- errorInfo: {
- chunk: null,
- frame: null,
- isFatal: false,
- name: err.name,
- message: err.message,
- stack: `Could not upload stitching progress ${err.stack}`,
- tmpDir: null,
- type: 'stitcher',
- attempt: 1,
- totalAttempts: 1,
- willRetry: false,
- },
- renderId: options.renderId,
- expectedBucketOwner: options.expectedBucketOwner,
- });
- });
+ options.overallProgress.setCombinedFrames(framesEncoded);
  };
- const onErrors = (errors) => {
- renderer_1.RenderInternals.Log.error({ indent: false, logLevel: options.logLevel }, 'Found Errors', errors);
- const firstError = errors[0];
- if (firstError.chunk !== null) {
- throw new Error(`Stopping Lambda function because error occurred while rendering chunk ${firstError.chunk}:\n${errors[0].stack
- .split('\n')
- .map((s) => ` ${s}`)
- .join('\n')}`);
- }
- throw new Error(`Stopping Lambda function because error occurred: ${errors[0].stack}`);
- };
- const outdir = (0, node_path_1.join)(renderer_1.RenderInternals.tmpDir(constants_1.CONCAT_FOLDER_TOKEN), 'bucket');
- if ((0, node_fs_1.existsSync)(outdir)) {
- (0, node_fs_1.rmSync)(outdir, {
- recursive: true,
- });
- }
- (0, node_fs_1.mkdirSync)(outdir);
- const { hasAudio, hasVideo } = (0, render_has_audio_video_1.lambdaRenderHasAudioVideo)(options.renderMetadata);
- const chunkMultiplier = [hasAudio, hasVideo].filter(truthy_1.truthy).length;
- const expectedFiles = chunkMultiplier * options.chunkCount;
- const files = await (0, concat_videos_1.getAllFilesS3)({
- bucket: options.bucketName,
- expectedFiles,
- outdir,
- renderId: options.renderId,
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- expectedBucketOwner: options.expectedBucketOwner,
- onErrors,
- logLevel: options.logLevel,
- });
  const encodingStart = Date.now();
  if (options.renderMetadata.type === 'still') {
  throw new Error('Cannot merge stills');
  }
- const { outfile, cleanupChunksProm } = await (0, concat_videos_1.concatVideosS3)({
+ const { outfile, cleanupChunksProm } = await (0, concat_videos_1.concatVideos)({
  onProgress,
  numberOfFrames: options.numberOfFrames,
  codec: options.codec,
  fps: options.fps,
  numberOfGifLoops: options.numberOfGifLoops,
- files,
- outdir,
+ files: options.files,
+ outdir: options.outdir,
  audioCodec: options.audioCodec,
  audioBitrate: options.audioBitrate,
  logLevel: options.logLevel,
@@ -116,8 +39,9 @@ const mergeChunksAndFinishRender = async (options) => {
  muted: options.renderMetadata.muted,
  });
  const encodingStop = Date.now();
- const outputSize = fs_1.default.statSync(outfile);
- const writeToS3 = (0, timer_1.timer)(`Writing to S3 (${outputSize.size} bytes)`, options.logLevel);
+ options.overallProgress.setTimeToCombine(encodingStop - encodingStart);
+ const outputSize = fs_1.default.statSync(outfile).size;
+ const writeToS3 = (0, timer_1.timer)(`Writing to S3 (${outputSize} bytes)`, options.logLevel);
  await (0, io_1.lambdaWriteFile)({
  bucketName: options.renderBucketName,
  key: options.key,
@@ -129,44 +53,9 @@ const mergeChunksAndFinishRender = async (options) => {
  customCredentials: options.customCredentials,
  });
  writeToS3.end();
- const contents = await (0, io_1.lambdaLs)({
- bucketName: options.bucketName,
- prefix: (0, constants_1.rendersPrefix)(options.renderId),
- expectedBucketOwner: options.expectedBucketOwner,
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- });
- const finalEncodingProgressProm = (0, io_1.lambdaWriteFile)({
- bucketName: options.bucketName,
- key: (0, constants_1.encodingProgressKey)(options.renderId),
- body: String(Math.ceil(options.numberOfFrames /
- (0, get_encoding_progress_step_size_1.getEncodingProgressStepSize)(options.numberOfFrames))),
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- privacy: 'private',
- expectedBucketOwner: options.expectedBucketOwner,
- downloadBehavior: null,
- customCredentials: null,
+ const errorExplanations = (0, inspect_errors_1.inspectErrors)({
+ errors: options.overallProgress.get().errors,
  });
- const errorExplanationsProm = (0, inspect_errors_1.inspectErrors)({
- contents,
- renderId: options.renderId,
- bucket: options.bucketName,
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- expectedBucketOwner: options.expectedBucketOwner,
- });
- const jobs = (0, get_files_to_delete_1.getFilesToDelete)({
- chunkCount: options.chunkCount,
- renderId: options.renderId,
- hasAudio,
- hasVideo,
- });
- const deletProm = options.logLevel === 'verbose'
- ? Promise.resolve(0)
- : (0, delete_chunks_1.cleanupFiles)({
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- bucket: options.bucketName,
- contents,
- jobs,
- });
  const cleanupSerializedInputPropsProm = (0, cleanup_serialized_input_props_1.cleanupSerializedInputProps)({
  bucketName: options.bucketName,
  region: (0, get_current_region_1.getCurrentRegionInFunction)(),
@@ -179,39 +68,23 @@ const mergeChunksAndFinishRender = async (options) => {
  });
  const { url: outputUrl } = (0, get_output_url_from_metadata_1.getOutputUrlFromMetadata)(options.renderMetadata, options.bucketName, options.customCredentials);
  const postRenderData = (0, create_post_render_data_1.createPostRenderData)({
- expectedBucketOwner: options.expectedBucketOwner,
  region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- renderId: options.renderId,
  memorySizeInMb: Number(process.env.AWS_LAMBDA_FUNCTION_MEMORY_SIZE),
  renderMetadata: options.renderMetadata,
- contents,
- errorExplanations: await errorExplanationsProm,
- timeToEncode: encodingStop - encodingStart,
+ errorExplanations,
  timeToDelete: (await Promise.all([
- deletProm,
  cleanupSerializedInputPropsProm,
  cleanupResolvedInputPropsProm,
- ])).reduce((a, b) => a + b, 0),
+ ])).reduce((a, b) => Math.max(a, b), 0),
  outputFile: {
- lastModified: Date.now(),
- size: outputSize.size,
  url: outputUrl,
  },
+ outputSize,
+ timeToCombine: encodingStop - encodingStart,
+ overallProgress: options.overallProgress.get(),
+ timeToFinish: Date.now() - options.startTime,
  });
- await finalEncodingProgressProm;
- await (0, write_post_render_data_1.writePostRenderData)({
- bucketName: options.bucketName,
- expectedBucketOwner: options.expectedBucketOwner,
- postRenderData,
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- renderId: options.renderId,
- });
- await (0, io_1.lambdaDeleteFile)({
- bucketName: options.bucketName,
- key: (0, constants_1.initalizedMetadataKey)(options.renderId),
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- customCredentials: null,
- });
+ options.overallProgress.setPostRenderData(postRenderData);
  await Promise.all([cleanupChunksProm, fs_1.default.promises.rm(outfile)]);
  return postRenderData;
  };
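
Note (illustrative only, not part of the published diff): the hunks above show that mergeChunksAndFinishRender no longer lists and downloads chunks from S3 or writes per-file progress markers; it now receives already-downloaded chunk files and an in-memory progress helper from its caller. The TypeScript sketch below summarizes the new caller-supplied fields read from the diff (`files`, `outdir`, `startTime`, `overallProgress`); the type name and import path are assumptions, not the package's published API.

    // Sketch of the new inputs, inferred from the diff above.
    // OverallProgressHelper is declared in overall-render-progress.d.ts (path assumed).
    import type { OverallProgressHelper } from './overall-render-progress';

    type MergeChunksExtraOptions = {
      files: string[];                        // local paths of chunk files streamed to disk by the launch step
      outdir: string;                         // directory holding those chunk files
      startTime: number;                      // used to compute timeToFinish for postRenderData
      overallProgress: OverallProgressHelper; // consolidated progress, uploaded as a single JSON object
      // ...remaining fields (codec, fps, bucketName, ...) are unchanged
    };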
package/dist/functions/helpers/min-max.d.ts

@@ -1,2 +1,2 @@
  export declare const min: (arr: number[]) => number;
- export declare const max: (arr: number[]) => number | null;
+ export declare const max: (arr: number[]) => number;
package/dist/functions/helpers/min-max.js

@@ -19,7 +19,7 @@ const min = (arr) => {
  exports.min = min;
  const max = (arr) => {
  if (arr.length === 0) {
- return null;
+ throw new Error('Array of 0 length');
  }
  let biggest = arr[0];
  for (let i = 0; i < arr.length; i++) {
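
Behavioral note on the change above: max() now throws on an empty array instead of returning null, so call sites have to guard before calling. A minimal consumer-side sketch (the import path mirrors the compiled file shown in the diff; the surrounding code is invented for illustration):

    import { max } from './min-max';

    const timings: number[] = [];
    // Previously max(timings) returned null for empty input; now it throws,
    // so handle the empty case explicitly.
    const latest = timings.length > 0 ? max(timings) : 0;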
package/dist/functions/helpers/overall-render-progress.d.ts

@@ -1,27 +1,49 @@
+ import type { LogLevel } from '@remotion/renderer';
  import type { AwsRegion } from '../../client';
+ import type { PostRenderData, RenderMetadata } from '../../shared/constants';
+ import type { ParsedTiming } from '../../shared/parse-lambda-timings-key';
+ import type { ChunkRetry } from './get-retry-stats';
+ import type { LambdaErrorInfo } from './write-lambda-error';
  export type OverallRenderProgress = {
  chunks: number[];
  framesRendered: number;
  framesEncoded: number;
  combinedFrames: number;
  timeToCombine: number | null;
+ timeToEncode: number | null;
+ timeToRenderFrames: number | null;
+ lambdasInvoked: number;
+ retries: ChunkRetry[];
+ postRenderData: PostRenderData | null;
+ timings: ParsedTiming[];
+ renderMetadata: RenderMetadata | null;
+ errors: LambdaErrorInfo[];
+ timeoutTimestamp: number;
  };
  export type OverallProgressHelper = {
  upload: () => Promise<void>;
- finishUploading: () => void;
  setFrames: ({ encoded, rendered, index, }: {
  rendered: number;
  encoded: number;
  index: number;
  }) => void;
- addChunkCompleted: (chunkIndex: number) => void;
+ setLambdaInvoked: (chunk: number) => void;
+ addChunkCompleted: (chunkIndex: number, start: number, rendered: number) => void;
  setCombinedFrames: (framesEncoded: number) => void;
  setTimeToCombine: (timeToCombine: number) => void;
+ addRetry: (retry: ChunkRetry) => void;
+ setPostRenderData: (postRenderData: PostRenderData) => void;
+ setRenderMetadata: (renderMetadata: RenderMetadata) => void;
+ addErrorWithoutUpload: (errorInfo: LambdaErrorInfo) => void;
+ setExpectedChunks: (expectedChunks: number) => void;
+ get: () => OverallRenderProgress;
  };
- export declare const makeOverallRenderProgress: ({ renderId, bucketName, expectedBucketOwner, region, expectedChunks, }: {
+ export declare const makeInitialOverallRenderProgress: (timeoutTimestamp: number) => OverallRenderProgress;
+ export declare const makeOverallRenderProgress: ({ renderId, bucketName, expectedBucketOwner, region, timeoutTimestamp, logLevel, }: {
  renderId: string;
  bucketName: string;
  expectedBucketOwner: string;
  region: AwsRegion;
- expectedChunks: number;
+ timeoutTimestamp: number;
+ logLevel: LogLevel;
  }) => OverallProgressHelper;
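
The declaration above consolidates what used to be several separate S3 objects (encoding progress, timings, retries, errors, post-render data) into a single OverallRenderProgress value. As an illustration only, a consumer could derive a coarse completion figure from it as sketched below; the field names come from the declaration, but the weighting is an arbitrary example, not Remotion's own progress formula, and the import path is assumed.

    import type { OverallRenderProgress } from './overall-render-progress';

    const roughPercent = (p: OverallRenderProgress, totalFrames: number): number => {
      if (p.postRenderData) {
        return 1; // render finished and post-render data was set
      }
      if (totalFrames === 0) {
        return 0;
      }
      // Arbitrary example weighting of the three frame counters.
      return (
        0.5 * (p.framesRendered / totalFrames) +
        0.3 * (p.framesEncoded / totalFrames) +
        0.2 * (p.combinedFrames / totalFrames)
      );
    };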
package/dist/functions/helpers/overall-render-progress.js

@@ -1,41 +1,56 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.makeOverallRenderProgress = void 0;
+ exports.makeOverallRenderProgress = exports.makeInitialOverallRenderProgress = void 0;
+ const renderer_1 = require("@remotion/renderer");
  const constants_1 = require("../../shared/constants");
  const io_1 = require("./io");
- const makeOverallRenderProgress = ({ renderId, bucketName, expectedBucketOwner, region, expectedChunks, }) => {
- const framesRendered = new Array(expectedChunks).fill(0);
- const framesEncoded = new Array(expectedChunks).fill(0);
- const renderProgress = {
+ const makeInitialOverallRenderProgress = (timeoutTimestamp) => {
+ return {
  chunks: [],
  framesRendered: 0,
  framesEncoded: 0,
  combinedFrames: 0,
  timeToCombine: null,
+ timeToEncode: null,
+ lambdasInvoked: 0,
+ retries: [],
+ postRenderData: null,
+ timings: [],
+ renderMetadata: null,
+ errors: [],
+ timeToRenderFrames: null,
+ timeoutTimestamp,
  };
+ };
+ exports.makeInitialOverallRenderProgress = makeInitialOverallRenderProgress;
+ const makeOverallRenderProgress = ({ renderId, bucketName, expectedBucketOwner, region, timeoutTimestamp, logLevel, }) => {
+ let framesRendered = [];
+ let framesEncoded = [];
+ let lambdasInvoked = [];
+ const renderProgress = (0, exports.makeInitialOverallRenderProgress)(timeoutTimestamp);
  let currentUploadPromise = null;
- let lastUpload = null;
- let stopUploading = false;
- const finishUploading = () => {
- stopUploading = true;
- };
- // TODO: What if upload fails?
+ const getCurrentProgress = () => renderProgress;
+ let latestUploadRequest = 0;
+ const getLatestRequestId = () => latestUploadRequest;
+ let encodeStartTime = null;
+ let renderFramesStartTime = null;
  const upload = async () => {
- if (stopUploading) {
- return;
- }
- if (lastUpload === JSON.stringify(renderProgress)) {
- return;
- }
+ const uploadRequestId = ++latestUploadRequest;
  if (currentUploadPromise) {
- currentUploadPromise = currentUploadPromise.then(() => {
- currentUploadPromise = null;
- return upload();
- });
+ await currentUploadPromise;
+ }
+ const toWrite = JSON.stringify(getCurrentProgress());
+ // Deduplicate two fast incoming requests
+ await new Promise((resolve) => {
+ setImmediate(() => resolve());
+ });
+ // If request has been replaced by a new one
+ if (getLatestRequestId() !== uploadRequestId) {
  return;
  }
+ const start = Date.now();
  currentUploadPromise = (0, io_1.lambdaWriteFile)({
- body: JSON.stringify(renderProgress),
+ body: toWrite,
  bucketName,
  customCredentials: null,
  downloadBehavior: null,
@@ -43,23 +58,68 @@ const makeOverallRenderProgress = ({ renderId, bucketName, expectedBucketOwner,
  key: (0, constants_1.overallProgressKey)(renderId),
  privacy: 'private',
  region,
+ })
+ .then(() => {
+ // By default, upload is way too fast (~20 requests per second)
+ // Space out the requests a bit
+ return new Promise((resolve) => {
+ setTimeout(resolve, 500 - (Date.now() - start));
+ });
+ })
+ .catch((err) => {
+ // If an error occurs in uploading the state that contains the errors,
+ // that is unfortunate. We just log it.
+ renderer_1.RenderInternals.Log.error({ indent: false, logLevel }, 'Error uploading progress', err);
  });
  await currentUploadPromise;
- lastUpload = JSON.stringify(renderProgress);
  currentUploadPromise = null;
  };
  return {
  upload,
- finishUploading,
  setFrames: ({ encoded, rendered, index, }) => {
+ if (framesRendered.length === 0) {
+ throw new Error('Expected chunks to be set before frames are set');
+ }
+ if (framesEncoded.length === 0) {
+ throw new Error('Expected chunks to be set before frames are set');
+ }
  framesRendered[index] = rendered;
  framesEncoded[index] = encoded;
- renderProgress.framesRendered = framesRendered.reduce((a, b) => a + b, 0);
- renderProgress.framesEncoded = framesEncoded.reduce((a, b) => a + b, 0);
+ const totalFramesEncoded = framesEncoded.reduce((a, b) => a + b, 0);
+ const totalFramesRendered = framesRendered.reduce((a, b) => a + b, 0);
+ if (renderProgress.framesEncoded === 0 && totalFramesEncoded > 0) {
+ encodeStartTime = Date.now();
+ }
+ if (renderProgress.framesRendered === 0 && totalFramesRendered > 0) {
+ renderFramesStartTime = Date.now();
+ }
+ if (renderProgress.timeToRenderFrames === null) {
+ const frameCount = renderProgress.renderMetadata &&
+ renderProgress.renderMetadata.type === 'video'
+ ? renderer_1.RenderInternals.getFramesToRender(renderProgress.renderMetadata.frameRange, renderProgress.renderMetadata.everyNthFrame).length
+ : null;
+ if (frameCount === totalFramesRendered) {
+ const timeToRenderFrames = Date.now() - (renderFramesStartTime !== null && renderFramesStartTime !== void 0 ? renderFramesStartTime : Date.now());
+ renderProgress.timeToRenderFrames = timeToRenderFrames;
+ }
+ }
+ renderProgress.framesRendered = totalFramesRendered;
+ renderProgress.framesEncoded = totalFramesEncoded;
  upload();
  },
- addChunkCompleted: (chunkIndex) => {
+ addChunkCompleted: (chunkIndex, start, rendered) => {
+ var _a;
  renderProgress.chunks.push(chunkIndex);
+ if (renderProgress.chunks.length ===
+ ((_a = renderProgress.renderMetadata) === null || _a === void 0 ? void 0 : _a.totalChunks)) {
+ const timeToEncode = Date.now() - (encodeStartTime !== null && encodeStartTime !== void 0 ? encodeStartTime : Date.now());
+ renderProgress.timeToEncode = timeToEncode;
+ }
+ renderProgress.timings.push({
+ chunk: chunkIndex,
+ start,
+ rendered,
+ });
  upload();
  },
  setCombinedFrames: (frames) => {
@@ -70,6 +130,35 @@ const makeOverallRenderProgress = ({ renderId, bucketName, expectedBucketOwner,
  renderProgress.timeToCombine = timeToCombine;
  upload();
  },
+ setLambdaInvoked(chunk) {
+ if (lambdasInvoked.length === 0) {
+ throw new Error('Expected chunks to be set before lambdas are set');
+ }
+ lambdasInvoked[chunk] = true;
+ renderProgress.lambdasInvoked = lambdasInvoked.reduce((a, b) => a + Number(b), 0);
+ upload();
+ },
+ setPostRenderData(postRenderData) {
+ renderProgress.postRenderData = postRenderData;
+ upload();
+ },
+ setRenderMetadata: (renderMetadata) => {
+ renderProgress.renderMetadata = renderMetadata;
+ upload();
+ },
+ addErrorWithoutUpload: (errorInfo) => {
+ renderProgress.errors.push(errorInfo);
+ },
+ setExpectedChunks: (expectedChunks) => {
+ framesRendered = new Array(expectedChunks).fill(0);
+ framesEncoded = new Array(expectedChunks).fill(0);
+ lambdasInvoked = new Array(expectedChunks).fill(false);
+ },
+ addRetry(retry) {
+ renderProgress.retries.push(retry);
+ upload();
+ },
+ get: () => renderProgress,
  };
  };
  exports.makeOverallRenderProgress = makeOverallRenderProgress;
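
The new upload() above coalesces bursts of progress updates: each call takes a fresh request id, yields once via setImmediate so that rapid successive calls collapse into the newest one, waits for any in-flight write to finish, and spaces writes roughly 500 ms apart, logging rather than rethrowing upload failures. A standalone sketch of that pattern follows (a generic reimplementation for clarity, not the package's code; the `write`/`getState` callbacks are placeholders).

    // Generic coalescing uploader: only the latest pending state is written,
    // and writes are spaced out to avoid hammering the storage backend.
    let latestRequestId = 0;
    let inFlight: Promise<void> | null = null;

    const scheduleUpload = async (
      write: (body: string) => Promise<void>,
      getState: () => unknown,
    ): Promise<void> => {
      const id = ++latestRequestId;        // mark this call as the newest
      if (inFlight) {
        await inFlight;                    // let the previous write finish first
      }
      await new Promise<void>((resolve) => setImmediate(resolve)); // collapse bursts of calls
      if (id !== latestRequestId) {
        return;                            // a newer call superseded this one
      }
      const started = Date.now();
      inFlight = write(JSON.stringify(getState()))
        .then(
          () =>
            new Promise<void>((resolve) =>
              setTimeout(resolve, Math.max(0, 500 - (Date.now() - started))), // keep writes ~500 ms apart
            ),
        )
        .catch(() => {
          // The real code logs the failure instead of rethrowing it.
        });
      await inFlight;
      inFlight = null;
    };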
package/dist/functions/helpers/stream-renderer.js

@@ -13,6 +13,10 @@ const streamRenderer = ({ payload, functionName, outdir, overallProgress, files,
  }
  return new Promise((resolve) => {
  const receivedStreamingPayload = ({ message }) => {
+ if (message.type === 'lambda-invoked') {
+ overallProgress.setLambdaInvoked(payload.chunk);
+ return;
+ }
  if (message.type === 'frames-rendered') {
  overallProgress.setFrames({
  index: payload.chunk,
@@ -22,28 +26,24 @@ const streamRenderer = ({ payload, functionName, outdir, overallProgress, files,
  return;
  }
  if (message.type === 'video-chunk-rendered') {
- const filename = (0, path_1.join)(outdir, (0, defaults_1.chunkKeyForIndex)({
- index: payload.chunk,
- type: 'video',
- }));
+ const filename = (0, path_1.join)(outdir, `chunk:${String(payload.chunk).padStart(8, '0')}:video`);
  (0, fs_1.writeFileSync)(filename, message.payload);
  files.push(filename);
  return;
  }
  if (message.type === 'audio-chunk-rendered') {
- const filename = (0, path_1.join)(outdir, (0, defaults_1.chunkKeyForIndex)({
- index: payload.chunk,
- type: 'audio',
- }));
+ const filename = (0, path_1.join)(outdir, `chunk:${String(payload.chunk).padStart(8, '0')}:audio`);
  (0, fs_1.writeFileSync)(filename, message.payload);
  files.push(filename);
  return;
  }
  if (message.type === 'chunk-complete') {
- overallProgress.addChunkCompleted(payload.chunk);
+ renderer_1.RenderInternals.Log.verbose({ indent: false, logLevel }, `Rendered chunk ${payload.chunk}`);
+ overallProgress.addChunkCompleted(payload.chunk, message.payload.start, message.payload.rendered);
  return;
  }
  if (message.type === 'error-occurred') {
+ overallProgress.addErrorWithoutUpload(message.payload.errorInfo);
  overallProgress.setFrames({
  encoded: 0,
  index: payload.chunk,
@@ -62,6 +62,7 @@ const streamRenderer = ({ payload, functionName, outdir, overallProgress, files,
  error: message.payload.error,
  shouldRetry: message.payload.shouldRetry,
  });
+ return;
  }
  throw new Error(`Unknown message type ${message.type}`);
  };
@@ -104,6 +105,11 @@ const streamRendererFunctionWithRetry = async ({ payload, files, functionName, o
  if (!result.shouldRetry) {
  throw result.error;
  }
+ overallProgress.addRetry({
+ attempt: payload.attempt + 1,
+ time: Date.now(),
+ chunk: payload.chunk,
+ });
  return (0, exports.streamRendererFunctionWithRetry)({
  files,
  functionName,
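
For reference, the message kinds handled in the hunks above form a small discriminated union. The sketch below is assembled from the diff only; the payload shapes marked as assumed are inferred, not taken from the package's type definitions, and fields not visible in the diff are omitted.

    import type { LambdaErrorInfo } from './write-lambda-error';

    type StreamingMessage =
      | { type: 'lambda-invoked' }
      | { type: 'frames-rendered'; payload: { rendered: number; encoded: number } } // shape assumed
      | { type: 'video-chunk-rendered'; payload: Buffer } // written to chunk:<index>:video
      | { type: 'audio-chunk-rendered'; payload: Buffer } // written to chunk:<index>:audio
      | { type: 'chunk-complete'; payload: { start: number; rendered: number } }
      | {
          type: 'error-occurred';
          payload: { errorInfo: LambdaErrorInfo; error: string; shouldRetry: boolean }; // error type assumed
        };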
package/dist/functions/helpers/streamify-response.d.ts

@@ -11,7 +11,4 @@ export declare class ResponseStream extends Stream.Writable {
  setContentType(contentType: string): void;
  setIsBase64Encoded(isBase64Encoded: boolean): void;
  }
- export declare const HANDLER_STREAMING: unique symbol;
- export declare const STREAM_RESPONSE = "response";
- export declare function isInAWS(handler: Function): boolean;
  export declare function streamifyResponse(handler: Function): Function;
package/dist/functions/helpers/streamify-response.js

@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.streamifyResponse = exports.isInAWS = exports.STREAM_RESPONSE = exports.HANDLER_STREAMING = exports.ResponseStream = void 0;
+ exports.streamifyResponse = exports.ResponseStream = void 0;
  const stream_1 = require("stream");
  class ResponseStream extends stream_1.Stream.Writable {
  constructor() {
@@ -29,16 +29,6 @@ function patchArgs(argList) {
  }
  return argList[1];
  }
- exports.HANDLER_STREAMING = Symbol.for('aws.lambda.runtime.handler.streaming');
- exports.STREAM_RESPONSE = 'response';
- function isInAWS(handler) {
- return (
- // @ts-expect-error
- handler[exports.HANDLER_STREAMING] !== undefined &&
- // @ts-expect-error
- handler[exports.HANDLER_STREAMING] === exports.STREAM_RESPONSE);
- }
- exports.isInAWS = isInAWS;
  function streamifyResponse(handler) {
  // Check if we are inside Lambda
  if (process.env.AWS_LAMBDA_FUNCTION_VERSION &&
@@ -56,9 +46,7 @@ function streamifyResponse(handler) {
  EventStream: [
  {
  PayloadChunk: {
- Payload: responseStream._isBase64Encoded
- ? responseStream.getBufferedData()
- : responseStream.getBufferedData(),
+ Payload: responseStream.getBufferedData(),
  },
  InvokeComplete: true,
  },
package/dist/functions/helpers/write-lambda-error.d.ts

@@ -17,12 +17,9 @@ export type LambdaErrorInfo = {
  };
  export declare const getTmpDirStateIfENoSp: (err: string) => LambdaErrorInfo['tmpDir'];
  export type EnhancedErrorInfo = LambdaErrorInfo & {
+ /**
+ * @deprecated Will always be an empty string.
+ */
  s3Location: string;
  explanation: string | null;
  };
- export declare const writeLambdaError: ({ bucketName, renderId, errorInfo, expectedBucketOwner, }: {
- bucketName: string;
- renderId: string;
- expectedBucketOwner: string;
- errorInfo: LambdaErrorInfo;
- }) => Promise<void>;
package/dist/functions/helpers/write-lambda-error.js

@@ -1,10 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.writeLambdaError = exports.getTmpDirStateIfENoSp = void 0;
- const constants_1 = require("../../shared/constants");
- const get_current_region_1 = require("./get-current-region");
+ exports.getTmpDirStateIfENoSp = void 0;
  const get_files_in_folder_1 = require("./get-files-in-folder");
- const io_1 = require("./io");
  const is_enosp_err_1 = require("./is-enosp-err");
  const getTmpDirStateIfENoSp = (err) => {
  if (!(0, is_enosp_err_1.errorIsOutOfSpaceError)(err)) {
@@ -21,20 +18,3 @@ const getTmpDirStateIfENoSp = (err) => {
  };
  };
  exports.getTmpDirStateIfENoSp = getTmpDirStateIfENoSp;
- const writeLambdaError = async ({ bucketName, renderId, errorInfo, expectedBucketOwner, }) => {
- await (0, io_1.lambdaWriteFile)({
- bucketName,
- key: `${(0, constants_1.getErrorFileName)({
- renderId,
- chunk: errorInfo.chunk,
- attempt: errorInfo.attempt,
- })}.txt`,
- body: JSON.stringify(errorInfo),
- region: (0, get_current_region_1.getCurrentRegionInFunction)(),
- privacy: 'private',
- expectedBucketOwner,
- downloadBehavior: null,
- customCredentials: null,
- });
- };
- exports.writeLambdaError = writeLambdaError;
package/dist/functions/index.d.ts

@@ -1 +1,6 @@
+ export type OrError<T> = T | {
+ type: 'error';
+ message: string;
+ stack: string;
+ };
  export declare const handler: Function;
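
With this change, handler responses can carry an error variant via OrError<T>. A consumer-side sketch of narrowing it (not part of the package; the local type alias simply mirrors the declaration above):

    type OrError<T> = T | { type: 'error'; message: string; stack: string };

    const unwrap = <T>(res: OrError<T>): T => {
      if (
        typeof res === 'object' &&
        res !== null &&
        (res as { type?: unknown }).type === 'error'
      ) {
        const err = res as { message: string; stack: string };
        throw new Error(`${err.message}\n${err.stack}`);
      }
      // Note: a T whose own `type` field equals 'error' would be misclassified by this check.
      return res as T;
    };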