@remotion/lambda 4.0.202 → 4.0.204

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/functions/helpers/cleanup-props.d.ts +1 -4
  2. package/dist/functions/helpers/expected-out-name.d.ts +1 -1
  3. package/dist/functions/helpers/get-browser-instance.d.ts +3 -2
  4. package/dist/functions/helpers/get-browser-instance.js +3 -4
  5. package/dist/functions/helpers/get-custom-out-name.d.ts +1 -1
  6. package/dist/functions/helpers/io.d.ts +3 -33
  7. package/dist/functions/helpers/io.js +3 -104
  8. package/dist/functions/helpers/print-cloudwatch-helper.d.ts +1 -1
  9. package/dist/functions/helpers/print-cloudwatch-helper.js +3 -3
  10. package/dist/functions/helpers/write-lambda-error.d.ts +2 -3
  11. package/dist/functions/helpers/write-lambda-error.js +3 -2
  12. package/dist/functions/provider-implementation.d.ts +0 -0
  13. package/dist/functions/provider-implementation.js +1 -0
  14. package/dist/shared/compress-props.d.ts +8 -6
  15. package/dist/shared/compress-props.js +11 -14
  16. package/package.json +11 -11
  17. package/remotionlambda-arm64.zip +0 -0
  18. package/dist/functions/helpers/check-if-render-exists.d.ts +0 -3
  19. package/dist/functions/helpers/check-if-render-exists.js +0 -15
  20. package/dist/functions/helpers/delete-chunks.d.ts +0 -9
  21. package/dist/functions/helpers/delete-chunks.js +0 -25
  22. package/dist/functions/helpers/get-cleanup-progress.d.ts +0 -10
  23. package/dist/functions/helpers/get-cleanup-progress.js +0 -35
  24. package/dist/functions/helpers/get-encoding-metadata.d.ts +0 -7
  25. package/dist/functions/helpers/get-encoding-metadata.js +0 -15
  26. package/dist/functions/helpers/get-encoding-progress-step-size.d.ts +0 -1
  27. package/dist/functions/helpers/get-encoding-progress-step-size.js +0 -7
  28. package/dist/functions/helpers/get-files-to-delete.d.ts +0 -10
  29. package/dist/functions/helpers/get-files-to-delete.js +0 -52
  30. package/dist/functions/helpers/get-final-encoding-status.d.ts +0 -6
  31. package/dist/functions/helpers/get-final-encoding-status.js +0 -18
  32. package/dist/functions/helpers/get-folder-size.d.ts +0 -1
  33. package/dist/functions/helpers/get-folder-size.js +0 -8
  34. package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts +0 -8
  35. package/dist/functions/helpers/get-lambdas-invoked-stats.js +0 -14
  36. package/dist/functions/helpers/get-post-render-data.d.ts +0 -8
  37. package/dist/functions/helpers/get-post-render-data.js +0 -22
  38. package/dist/functions/helpers/get-render-metadata.d.ts +0 -8
  39. package/dist/functions/helpers/get-render-metadata.js +0 -17
  40. package/dist/functions/helpers/get-rendered-frames-progress.d.ts +0 -8
  41. package/dist/functions/helpers/get-rendered-frames-progress.js +0 -37
  42. package/dist/functions/helpers/get-time-to-finish.d.ts +0 -5
  43. package/dist/functions/helpers/get-time-to-finish.js +0 -13
  44. package/dist/functions/helpers/streaming-payloads.d.ts +0 -19
  45. package/dist/functions/helpers/streaming-payloads.js +0 -25
  46. package/dist/functions/helpers/write-post-render-data.d.ts +0 -9
  47. package/dist/functions/helpers/write-post-render-data.js +0 -18
  48. package/dist/functions/merge.d.ts +0 -9
  49. package/dist/functions/merge.js +0 -61
  50. package/dist/shared/chunk-progress.d.ts +0 -9
  51. package/dist/shared/chunk-progress.js +0 -2034
  52. package/dist/shared/parse-chunk-key.d.ts +0 -5
  53. package/dist/shared/parse-chunk-key.js +0 -15
  54. package/dist/shared/parse-lambda-initialized-key.d.ts +0 -5
  55. package/dist/shared/parse-lambda-initialized-key.js +0 -15
  56. package/dist/shared/serialize-props.d.ts +0 -14
  57. package/dist/shared/serialize-props.js +0 -36
package/dist/functions/helpers/cleanup-props.d.ts CHANGED
@@ -5,7 +5,4 @@ export declare const cleanupProps: <Provider extends CloudProvider<string, Recor
  inputProps: SerializedInputProps;
  providerSpecifics: ProviderSpecifics<Provider>;
  forcePathStyle: boolean;
- }) => Promise<[
- number,
- number
- ]>;
+ }) => Promise<[number, number]>;
package/dist/functions/helpers/expected-out-name.d.ts CHANGED
@@ -1,4 +1,4 @@
  import type { CustomCredentials, OutNameInput } from '@remotion/serverless/client';
  import type { OutNameOutput, RenderMetadata } from '../../defaults';
  export declare const getCredentialsFromOutName: <Region extends string>(name: OutNameInput<Region> | null) => CustomCredentials<Region> | null;
- export declare const getExpectedOutName: <Region extends string>(renderMetadata: RenderMetadata<Region>, bucketName: string, customCredentials: CustomCredentials<Region> | null) => OutNameOutput<Region>;
+ export declare const getExpectedOutName: <Region extends string>(renderMetadata: RenderMetadata, bucketName: string, customCredentials: CustomCredentials<Region> | null) => OutNameOutput<Region>;
package/dist/functions/helpers/get-browser-instance.d.ts CHANGED
@@ -1,9 +1,10 @@
  import type { ChromiumOptions, LogLevel, openBrowser } from '@remotion/renderer';
- import type { Await } from '../../shared/await';
+ import type { ProviderSpecifics } from '@remotion/serverless';
+ import type { Await } from '@remotion/serverless/client';
  type LaunchedBrowser = {
  instance: Await<ReturnType<typeof openBrowser>>;
  configurationString: string;
  };
  export declare const forgetBrowserEventLoop: (logLevel: LogLevel) => void;
- export declare const getBrowserInstance: (logLevel: LogLevel, indent: boolean, chromiumOptions: ChromiumOptions) => Promise<LaunchedBrowser>;
+ export declare const getBrowserInstance: (logLevel: LogLevel, indent: boolean, chromiumOptions: ChromiumOptions, providerSpecifics: ProviderSpecifics) => Promise<LaunchedBrowser>;
  export {};
package/dist/functions/helpers/get-browser-instance.js CHANGED
@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.getBrowserInstance = exports.forgetBrowserEventLoop = void 0;
  const renderer_1 = require("@remotion/renderer");
  const version_1 = require("remotion/version");
- const get_chromium_executable_path_1 = require("./get-chromium-executable-path");
  const makeConfigurationString = (options, logLevel) => {
  var _a, _b, _c;
  return [
@@ -37,7 +36,7 @@ const forgetBrowserEventLoop = (logLevel) => {
  _browserInstance === null || _browserInstance === void 0 ? void 0 : _browserInstance.instance.forgetEventLoop();
  };
  exports.forgetBrowserEventLoop = forgetBrowserEventLoop;
- const getBrowserInstance = async (logLevel, indent, chromiumOptions) => {
+ const getBrowserInstance = async (logLevel, indent, chromiumOptions, providerSpecifics) => {
  var _a;
  const actualChromiumOptions = {
  ...chromiumOptions,
@@ -57,7 +56,7 @@ const getBrowserInstance = async (logLevel, indent, chromiumOptions) => {
  if (!_browserInstance) {
  renderer_1.RenderInternals.Log.info({ indent: false, logLevel }, 'Cold Lambda function, launching new browser instance');
  launching = true;
- const execPath = (0, get_chromium_executable_path_1.executablePath)();
+ const execPath = providerSpecifics.getChromiumPath();
  const instance = await renderer_1.RenderInternals.internalOpenBrowser({
  browser: 'chrome',
  browserExecutable: execPath,
@@ -91,7 +90,7 @@ const getBrowserInstance = async (logLevel, indent, chromiumOptions) => {
  _browserInstance.instance.rememberEventLoop();
  await _browserInstance.instance.close(true, logLevel, indent);
  _browserInstance = null;
- return (0, exports.getBrowserInstance)(logLevel, indent, chromiumOptions);
+ return (0, exports.getBrowserInstance)(logLevel, indent, chromiumOptions, providerSpecifics);
  }
  renderer_1.RenderInternals.Log.info({ indent: false, logLevel }, 'Warm Lambda function, reusing browser instance');
  _browserInstance.instance.rememberEventLoop();
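Practical effect of this change: callers of the internal getBrowserInstance helper now pass a ProviderSpecifics object, and the Chromium binary is resolved through providerSpecifics.getChromiumPath() instead of the removed ./get-chromium-executable-path helper. A minimal sketch of the new call shape, assuming an awsImplementation object that satisfies ProviderSpecifics (that name, and the re-declared signature, are illustrative, not part of this diff):

import type {ChromiumOptions, LogLevel} from '@remotion/renderer';
import type {ProviderSpecifics} from '@remotion/serverless';

// Assumed to be in scope: the internal helper declared in get-browser-instance.d.ts
// above and a concrete provider implementation. Neither is public API.
declare const getBrowserInstance: (
  logLevel: LogLevel,
  indent: boolean,
  chromiumOptions: ChromiumOptions,
  providerSpecifics: ProviderSpecifics,
) => Promise<{configurationString: string}>;
declare const awsImplementation: ProviderSpecifics;

const warmUpBrowser = async () => {
  // New fourth argument in 4.0.204: the provider supplies the Chromium path.
  await getBrowserInstance('info', false, {}, awsImplementation);
};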
package/dist/functions/helpers/get-custom-out-name.d.ts CHANGED
@@ -1,6 +1,6 @@
  import type { CustomCredentials, OutNameInput } from '@remotion/serverless/client';
  import type { RenderMetadata } from '../../defaults';
  export declare const getCustomOutName: <Region extends string>({ renderMetadata, customCredentials, }: {
- renderMetadata: RenderMetadata<Region>;
+ renderMetadata: RenderMetadata;
  customCredentials: CustomCredentials<Region> | null;
  }) => OutNameInput<Region> | null;
package/dist/functions/helpers/io.d.ts CHANGED
@@ -1,9 +1,6 @@
- /// <reference types="node" />
- /// <reference types="node" />
  import type { _Object } from '@aws-sdk/client-s3';
- import type { AwsRegion, CustomCredentials, DownloadBehavior, Privacy } from '@remotion/serverless/client';
- import type { ReadStream } from 'node:fs';
- import type { Readable } from 'stream';
+ import type { CustomCredentials } from '@remotion/serverless/client';
+ import type { AwsRegion } from '../../regions';
  export type LambdaLSInput = {
  bucketName: string;
  prefix: string;
@@ -12,39 +9,12 @@ export type LambdaLSInput = {
  continuationToken?: string;
  };
  export type LambdaLsReturnType = Promise<_Object[]>;
- export declare const lambdaLs: ({ bucketName, prefix, region, expectedBucketOwner, continuationToken, }: LambdaLSInput) => LambdaLsReturnType;
- export declare const lambdaDeleteFile: ({ bucketName, key, region, customCredentials, }: {
- region: AwsRegion;
- bucketName: string;
- key: string;
- customCredentials: CustomCredentials | null;
- }) => Promise<void>;
- type LambdaWriteFileInput = {
- bucketName: string;
- key: string;
- body: ReadStream | string | Uint8Array;
- region: AwsRegion;
- privacy: Privacy;
- expectedBucketOwner: string | null;
- downloadBehavior: DownloadBehavior | null;
- customCredentials: CustomCredentials | null;
- };
- export declare const lambdaWriteFile: (params: LambdaWriteFileInput & {
- retries?: number;
- }) => Promise<void>;
- export declare const lambdaReadFile: ({ bucketName, key, region, expectedBucketOwner, }: {
- bucketName: string;
- key: string;
- region: AwsRegion;
- expectedBucketOwner: string;
- }) => Promise<Readable>;
  export declare const lambdaHeadCommand: ({ bucketName, key, region, customCredentials, }: {
  bucketName: string;
  key: string;
  region: AwsRegion;
- customCredentials: CustomCredentials | null;
+ customCredentials: CustomCredentials<AwsRegion> | null;
  }) => Promise<{
  LastModified?: Date | undefined;
  ContentLength?: number | undefined;
  }>;
- export {};
package/dist/functions/helpers/io.js CHANGED
@@ -1,111 +1,10 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.lambdaHeadCommand = exports.lambdaReadFile = exports.lambdaWriteFile = exports.lambdaDeleteFile = exports.lambdaLs = void 0;
+ exports.lambdaHeadCommand = void 0;
  const client_s3_1 = require("@aws-sdk/client-s3");
- const mime_types_1 = __importDefault(require("mime-types"));
- const aws_clients_1 = require("../../shared/aws-clients");
- const content_disposition_header_1 = require("../../shared/content-disposition-header");
- const lambdaLs = async ({ bucketName, prefix, region, expectedBucketOwner, continuationToken, }) => {
- var _a, _b, _c;
- try {
- const list = await (0, aws_clients_1.getS3Client)(region, null).send(new client_s3_1.ListObjectsV2Command({
- Bucket: bucketName,
- Prefix: prefix,
- ExpectedBucketOwner: expectedBucketOwner !== null && expectedBucketOwner !== void 0 ? expectedBucketOwner : undefined,
- ContinuationToken: continuationToken,
- }));
- if (list.NextContinuationToken) {
- return [
- ...((_a = list.Contents) !== null && _a !== void 0 ? _a : []),
- ...(await (0, exports.lambdaLs)({
- bucketName,
- prefix,
- expectedBucketOwner,
- region,
- continuationToken: list.NextContinuationToken,
- })),
- ];
- }
- return (_b = list.Contents) !== null && _b !== void 0 ? _b : [];
- }
- catch (err) {
- if (!expectedBucketOwner) {
- throw err;
- }
- // Prevent from accessing a foreign bucket, retry without ExpectedBucketOwner and see if it works. If it works then it's an owner mismatch.
- if ((_c = err.stack) === null || _c === void 0 ? void 0 : _c.includes('AccessDenied')) {
- await (0, aws_clients_1.getS3Client)(region, null).send(new client_s3_1.ListObjectsV2Command({
- Bucket: bucketName,
- Prefix: prefix,
- }));
- throw new Error(`Bucket owner mismatch: Expected the bucket ${bucketName} to be owned by you (AWS Account ID: ${expectedBucketOwner}) but it's not the case. Did you accidentially specify the wrong bucket?`);
- }
- throw err;
- }
- };
- exports.lambdaLs = lambdaLs;
- const lambdaDeleteFile = async ({ bucketName, key, region, customCredentials, }) => {
- await (0, aws_clients_1.getS3Client)(region, customCredentials).send(new client_s3_1.DeleteObjectCommand({
- Bucket: bucketName,
- Key: key,
- }));
- };
- exports.lambdaDeleteFile = lambdaDeleteFile;
- const tryLambdaWriteFile = async ({ bucketName, key, body, region, privacy, expectedBucketOwner, downloadBehavior, customCredentials, }) => {
- await (0, aws_clients_1.getS3Client)(region, customCredentials).send(new client_s3_1.PutObjectCommand({
- Bucket: bucketName,
- Key: key,
- Body: body,
- ACL: privacy === 'no-acl'
- ? undefined
- : privacy === 'private'
- ? 'private'
- : 'public-read',
- ExpectedBucketOwner: customCredentials
- ? undefined
- : expectedBucketOwner !== null && expectedBucketOwner !== void 0 ? expectedBucketOwner : undefined,
- ContentType: mime_types_1.default.lookup(key) || 'application/octet-stream',
- ContentDisposition: (0, content_disposition_header_1.getContentDispositionHeader)(downloadBehavior),
- }));
- };
- const lambdaWriteFile = async (params) => {
- var _a;
- const remainingRetries = (_a = params.retries) !== null && _a !== void 0 ? _a : 2;
- try {
- await tryLambdaWriteFile(params);
- }
- catch (err) {
- if (remainingRetries === 0) {
- throw err;
- }
- const backoff = 2 ** (2 - remainingRetries) * 2000;
- await new Promise((resolve) => {
- setTimeout(resolve, backoff);
- });
- console.warn('Failed to write file to Lambda:');
- console.warn(err);
- console.warn(`Retrying (${remainingRetries} retries remaining)...`);
- return (0, exports.lambdaWriteFile)({
- ...params,
- retries: remainingRetries - 1,
- });
- }
- };
- exports.lambdaWriteFile = lambdaWriteFile;
- const lambdaReadFile = async ({ bucketName, key, region, expectedBucketOwner, }) => {
- const { Body } = await (0, aws_clients_1.getS3Client)(region, null).send(new client_s3_1.GetObjectCommand({
- Bucket: bucketName,
- Key: key,
- ExpectedBucketOwner: expectedBucketOwner,
- }));
- return Body;
- };
- exports.lambdaReadFile = lambdaReadFile;
+ const get_s3_client_1 = require("../../shared/get-s3-client");
  const lambdaHeadCommand = async ({ bucketName, key, region, customCredentials, }) => {
- const head = await (0, aws_clients_1.getS3Client)(region, customCredentials).send(new client_s3_1.HeadObjectCommand({
+ const head = await (0, get_s3_client_1.getS3Client)(region, customCredentials).send(new client_s3_1.HeadObjectCommand({
  Bucket: bucketName,
  Key: key,
  }));
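Only lambdaHeadCommand survives in this module; the list, read, write, and delete helpers have been removed, and the remaining helper now obtains its client from shared/get-s3-client. A small usage sketch of the surviving function, with the helper re-declared locally to stay self-contained; the bucket name and key are placeholders, not values from this diff:

import type {CustomCredentials} from '@remotion/serverless/client';

// Simplified mirror of the declaration in io.d.ts above, narrowed to one region.
declare const lambdaHeadCommand: (params: {
  bucketName: string;
  key: string;
  region: 'us-east-1';
  customCredentials: CustomCredentials<'us-east-1'> | null;
}) => Promise<{LastModified?: Date; ContentLength?: number}>;

const checkRenderedFile = async () => {
  const {ContentLength} = await lambdaHeadCommand({
    bucketName: 'remotionlambda-example', // illustrative
    key: 'renders/abcdef/out.mp4',        // illustrative
    region: 'us-east-1',
    customCredentials: null, // now typed as CustomCredentials<AwsRegion> | null
  });
  return ContentLength;
};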
package/dist/functions/helpers/print-cloudwatch-helper.d.ts CHANGED
@@ -1,3 +1,3 @@
  import type { LogLevel } from '@remotion/renderer';
  import type { ServerlessRoutines } from '@remotion/serverless/client';
- export declare const printLoggingGrepHelper: (type: ServerlessRoutines, data: Record<string, string | boolean>, logLevel: LogLevel) => void;
+ export declare const printCloudwatchHelper: (type: ServerlessRoutines, data: Record<string, string | boolean>, logLevel: LogLevel) => void;
package/dist/functions/helpers/print-cloudwatch-helper.js CHANGED
@@ -1,12 +1,12 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.printLoggingGrepHelper = void 0;
+ exports.printCloudwatchHelper = void 0;
  const renderer_1 = require("@remotion/renderer");
- const printLoggingGrepHelper = (type, data, logLevel) => {
+ const printCloudwatchHelper = (type, data, logLevel) => {
  const d = Object.keys(data).reduce((a, b) => {
  return [...a, `${b}=${data[b]}`];
  }, []);
  const msg = [`method=${type}`, ...d].join(',');
  renderer_1.RenderInternals.Log.info({ indent: false, logLevel }, msg);
  };
- exports.printLoggingGrepHelper = printLoggingGrepHelper;
+ exports.printCloudwatchHelper = printCloudwatchHelper;
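Apart from the rename from printLoggingGrepHelper to printCloudwatchHelper, the body is unchanged: it still emits one comma-separated, grep-friendly log line. A worked example derived from the function body above (the renderId value is illustrative):

// Given type = 'renderer' and data = {renderId: 'abcdef', attempt: '1'},
// the emitted line is:
// method=renderer,renderId=abcdef,attempt=1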
package/dist/functions/helpers/write-lambda-error.d.ts CHANGED
@@ -1,5 +1,4 @@
- import type { ProviderSpecifics } from '@remotion/serverless';
- import type { FileNameAndSize } from '@remotion/serverless/client';
+ import type { FileNameAndSize } from './get-files-in-folder';
  export type LambdaErrorInfo = {
  type: 'renderer' | 'browser' | 'stitcher' | 'webhook' | 'artifact';
  message: string;
@@ -16,7 +15,7 @@ export type LambdaErrorInfo = {
  total: number;
  } | null;
  };
- export declare const getTmpDirStateIfENoSp: <Region extends string>(err: string, providerSpecifics: ProviderSpecifics<Region>) => LambdaErrorInfo['tmpDir'];
+ export declare const getTmpDirStateIfENoSp: (err: string) => LambdaErrorInfo['tmpDir'];
  export type EnhancedErrorInfo = LambdaErrorInfo & {
  /**
  * @deprecated Will always be an empty string.
package/dist/functions/helpers/write-lambda-error.js CHANGED
@@ -1,12 +1,13 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.getTmpDirStateIfENoSp = void 0;
+ const get_files_in_folder_1 = require("./get-files-in-folder");
  const is_enosp_err_1 = require("./is-enosp-err");
- const getTmpDirStateIfENoSp = (err, providerSpecifics) => {
+ const getTmpDirStateIfENoSp = (err) => {
  if (!(0, is_enosp_err_1.errorIsOutOfSpaceError)(err)) {
  return null;
  }
- const files = providerSpecifics.getFolderFiles('/tmp');
+ const files = (0, get_files_in_folder_1.getFolderFiles)('/tmp');
  return {
  files: files
  .slice(0)
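getTmpDirStateIfENoSp now takes only the error string and lists /tmp through the local getFolderFiles helper rather than through ProviderSpecifics. A minimal sketch of the new call from a catch block, assuming a simplified return shape and an illustrative error source; nothing here beyond the signature comes from this diff:

// Simplified mirror of the declaration in write-lambda-error.d.ts above.
declare const getTmpDirStateIfENoSp: (err: string) => {files: unknown[]; total: number} | null;
declare const doRenderWork: () => Promise<void>;

const renderWithDiskDiagnostics = async () => {
  try {
    await doRenderWork();
  } catch (err) {
    // Null unless the message indicates an out-of-disk-space (ENOSPC) condition.
    const tmpDir = getTmpDirStateIfENoSp((err as Error).stack ?? (err as Error).message);
    return tmpDir;
  }
};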
package/dist/functions/provider-implementation.d.ts: file without changes
package/dist/functions/provider-implementation.js CHANGED
@@ -0,0 +1 @@
+ "use strict";
package/dist/shared/compress-props.d.ts CHANGED
@@ -1,20 +1,22 @@
- import type { SerializedInputProps } from '@remotion/serverless/client';
- import type { AwsRegion } from '../client';
+ import type { ProviderSpecifics } from '@remotion/serverless';
+ import { type SerializedInputProps } from '@remotion/serverless/client';
  type PropsType = 'input-props' | 'resolved-props';
  export declare const serializeOrThrow: (inputProps: Record<string, unknown>, propsType: PropsType) => string;
  export declare const getNeedsToUpload: (type: 'still' | 'video-or-audio', sizes: number[]) => boolean;
- export declare const compressInputProps: ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, }: {
+ export declare const compressInputProps: <Region extends string>({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, }: {
  stringifiedInputProps: string;
- region: AwsRegion;
+ region: Region;
  userSpecifiedBucketName: string | null;
  propsType: PropsType;
  needsToUpload: boolean;
+ providerSpecifics: ProviderSpecifics<Region>;
  }) => Promise<SerializedInputProps>;
- export declare const decompressInputProps: ({ serialized, region, bucketName, expectedBucketOwner, propsType, }: {
+ export declare const decompressInputProps: <Region extends string>({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, }: {
  serialized: SerializedInputProps;
- region: AwsRegion;
+ region: Region;
  bucketName: string;
  expectedBucketOwner: string;
  propsType: PropsType;
+ providerSpecifics: ProviderSpecifics<Region>;
  }) => Promise<string>;
  export {};
package/dist/shared/compress-props.js CHANGED
@@ -1,18 +1,14 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.decompressInputProps = exports.compressInputProps = exports.getNeedsToUpload = exports.serializeOrThrow = void 0;
+ const client_1 = require("@remotion/serverless/client");
  const no_react_1 = require("remotion/no-react");
- const get_or_create_bucket_1 = require("../api/get-or-create-bucket");
- const io_1 = require("../functions/helpers/io");
- const constants_1 = require("./constants");
- const random_hash_1 = require("./random-hash");
- const stream_to_string_1 = require("./stream-to-string");
  const validate_webhook_1 = require("./validate-webhook");
  const makeKey = (type, hash) => {
  if (type === 'input-props') {
- return (0, constants_1.inputPropsKey)(hash);
+ return (0, client_1.inputPropsKey)(hash);
  }
- return (0, constants_1.resolvedPropsKey)(hash);
+ return (0, client_1.resolvedPropsKey)(hash);
  };
  const serializeOrThrow = (inputProps, propsType) => {
  try {
@@ -39,15 +35,16 @@ const getNeedsToUpload = (type, sizes) => {
  return false;
  };
  exports.getNeedsToUpload = getNeedsToUpload;
- const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, }) => {
- const hash = (0, random_hash_1.randomHash)();
+ const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, }) => {
+ const hash = providerSpecifics.randomHash();
  if (needsToUpload) {
- const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, get_or_create_bucket_1.internalGetOrCreateBucket)({
+ const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, client_1.internalGetOrCreateBucket)({
  region,
  enableFolderExpiry: null,
  customCredentials: null,
+ providerSpecifics,
  })).bucketName;
- await (0, io_1.lambdaWriteFile)({
+ await providerSpecifics.writeFile({
  body: stringifiedInputProps,
  bucketName,
  region,
@@ -69,18 +66,18 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
  };
  };
  exports.compressInputProps = compressInputProps;
- const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, }) => {
+ const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, }) => {
  if (serialized.type === 'payload') {
  return serialized.payload;
  }
  try {
- const response = await (0, io_1.lambdaReadFile)({
+ const response = await providerSpecifics.readFile({
  bucketName,
  expectedBucketOwner,
  key: makeKey(propsType, serialized.hash),
  region,
  });
- const body = await (0, stream_to_string_1.streamToString)(response);
+ const body = await (0, client_1.streamToString)(response);
  const payload = body;
  return payload;
  }
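compressInputProps and decompressInputProps are now generic over the region string and take the provider implementation explicitly: the hash, the bucket bootstrap, and the S3 reads and writes all go through it instead of the removed Lambda-local helpers. A minimal sketch of the new call shape, assuming an awsImplementation object and with the exports from compress-props.d.ts above re-declared locally; the props value and region are illustrative:

import type {ProviderSpecifics} from '@remotion/serverless';
import type {SerializedInputProps} from '@remotion/serverless/client';

// Assumed to be in scope: the exports declared in compress-props.d.ts above and
// a concrete AWS implementation of ProviderSpecifics (the name is illustrative).
declare const serializeOrThrow: (inputProps: Record<string, unknown>, propsType: 'input-props' | 'resolved-props') => string;
declare const getNeedsToUpload: (type: 'still' | 'video-or-audio', sizes: number[]) => boolean;
declare const compressInputProps: <Region extends string>(params: {
  stringifiedInputProps: string;
  region: Region;
  userSpecifiedBucketName: string | null;
  propsType: 'input-props' | 'resolved-props';
  needsToUpload: boolean;
  providerSpecifics: ProviderSpecifics<Region>;
}) => Promise<SerializedInputProps>;
declare const awsImplementation: ProviderSpecifics<'us-east-1'>;

const uploadProps = async (): Promise<SerializedInputProps> => {
  const stringified = serializeOrThrow({title: 'Hello World'}, 'input-props');
  return compressInputProps({
    stringifiedInputProps: stringified,
    region: 'us-east-1',
    userSpecifiedBucketName: null,
    propsType: 'input-props',
    needsToUpload: getNeedsToUpload('video-or-audio', [stringified.length]),
    // New in 4.0.204: supplies randomHash(), the bucket bootstrap, and writeFile().
    providerSpecifics: awsImplementation,
  });
};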
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/lambda"
  },
  "name": "@remotion/lambda",
- "version": "4.0.202",
+ "version": "4.0.204",
  "description": "Render Remotion videos on AWS Lambda",
  "main": "dist/index.js",
  "sideEffects": false,
@@ -22,13 +22,13 @@
  "@aws-sdk/s3-request-presigner": "3.583.0",
  "mime-types": "2.1.34",
  "zod": "3.22.3",
- "@remotion/bundler": "4.0.202",
- "@remotion/cli": "4.0.202",
- "@remotion/renderer": "4.0.202",
- "@remotion/studio-server": "4.0.202",
- "@remotion/streaming": "4.0.202",
- "remotion": "4.0.202",
- "@remotion/serverless": "4.0.202"
+ "@remotion/bundler": "4.0.204",
+ "@remotion/cli": "4.0.204",
+ "@remotion/renderer": "4.0.204",
+ "@remotion/studio-server": "4.0.204",
+ "@remotion/streaming": "4.0.204",
+ "@remotion/serverless": "4.0.204",
+ "remotion": "4.0.204"
  },
  "devDependencies": {
  "@types/mime-types": "2.1.1",
@@ -37,11 +37,11 @@
  "pureimage": "0.4.13",
  "vitest": "0.31.1",
  "zip-lib": "^0.7.2",
- "@remotion/bundler": "4.0.202",
- "@remotion/compositor-linux-arm64-gnu": "4.0.202"
+ "@remotion/bundler": "4.0.204",
+ "@remotion/compositor-linux-arm64-gnu": "4.0.204"
  },
  "peerDependencies": {
- "@remotion/bundler": "4.0.202"
+ "@remotion/bundler": "4.0.204"
  },
  "publishConfig": {
  "access": "public"
package/remotionlambda-arm64.zip: binary file
package/dist/functions/helpers/check-if-render-exists.d.ts DELETED
@@ -1,3 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { AwsRegion } from '../../client';
- export declare const checkIfRenderExists: (contents: _Object[], renderId: string, bucketName: string, region: AwsRegion) => void;
package/dist/functions/helpers/check-if-render-exists.js DELETED
@@ -1,15 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.checkIfRenderExists = void 0;
- const constants_1 = require("../../shared/constants");
- const checkIfRenderExists = (contents, renderId, bucketName, region) => {
- const initializedExists = Boolean(contents.find((c) => {
- var _a;
- return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith((0, constants_1.initalizedMetadataKey)(renderId));
- }));
- if (!initializedExists) {
- // ! Error message is checked in progress handler and will be retried. Make sure to update
- throw new TypeError(`No render with ID "${renderId}" found in bucket ${bucketName} and region ${region}`);
- }
- };
- exports.checkIfRenderExists = checkIfRenderExists;
package/dist/functions/helpers/delete-chunks.d.ts DELETED
@@ -1,9 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { AwsRegion } from '../../pricing/aws-regions';
- import type { CleanupJob } from './get-files-to-delete';
- export declare const cleanupFiles: ({ bucket, region, contents, jobs, }: {
- bucket: string;
- region: AwsRegion;
- contents: _Object[];
- jobs: CleanupJob[];
- }) => Promise<number>;
package/dist/functions/helpers/delete-chunks.js DELETED
@@ -1,25 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.cleanupFiles = void 0;
- const clean_items_1 = require("../../api/clean-items");
- const cleanupFiles = async ({ bucket, region, contents, jobs, }) => {
- const start = Date.now();
- await (0, clean_items_1.cleanItems)({
- bucket,
- region,
- list: jobs.map((item) => {
- var _a;
- if (item.type === 'exact') {
- return item.name;
- }
- if (item.type === 'prefix') {
- return (_a = contents.find((c) => { var _a; return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith(item.name); })) === null || _a === void 0 ? void 0 : _a.Key;
- }
- throw new Error('unexpected in deleteChunks()');
- }),
- onAfterItemDeleted: () => undefined,
- onBeforeItemDeleted: () => undefined,
- });
- return Date.now() - start;
- };
- exports.cleanupFiles = cleanupFiles;
package/dist/functions/helpers/get-cleanup-progress.d.ts DELETED
@@ -1,10 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { CleanupInfo } from '../../shared/constants';
- export declare const getCleanupProgress: ({ contents, output, chunkCount, renderId, hasAudio, hasVideo, }: {
- contents: _Object[];
- output: string | null;
- chunkCount: number;
- renderId: string;
- hasAudio: boolean;
- hasVideo: boolean;
- }) => null | CleanupInfo;
package/dist/functions/helpers/get-cleanup-progress.js DELETED
@@ -1,35 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getCleanupProgress = void 0;
- const get_files_to_delete_1 = require("./get-files-to-delete");
- const getCleanupProgress = ({ contents, output, chunkCount, renderId, hasAudio, hasVideo, }) => {
- if (output === null) {
- return null;
- }
- const filesToDelete = (0, get_files_to_delete_1.getFilesToDelete)({
- chunkCount,
- renderId,
- hasAudio,
- hasVideo,
- });
- const filesStillThere = contents.filter((c) => {
- return filesToDelete.find((f) => {
- var _a;
- if (f.type === 'exact') {
- return f.name === c.Key;
- }
- if (f.type === 'prefix') {
- return (_a = c.Key) === null || _a === void 0 ? void 0 : _a.startsWith(f.name);
- }
- throw new Error('Unexpected in getCleanupProgress');
- });
- });
- const filesDeleted = Math.max(0, filesToDelete.length - filesStillThere.length);
- return {
- minFilesToDelete: filesToDelete.length,
- filesDeleted,
- // We don't know. Only if post render data is saved, we know the timing
- doneIn: null,
- };
- };
- exports.getCleanupProgress = getCleanupProgress;
package/dist/functions/helpers/get-encoding-metadata.d.ts DELETED
@@ -1,7 +0,0 @@
- import type { _Object } from '@aws-sdk/client-s3';
- import type { EncodingProgress } from '../../defaults';
- export declare const getEncodingMetadata: ({ exists, frameCount, stepSize, }: {
- exists: _Object | undefined;
- frameCount: number;
- stepSize: number;
- }) => EncodingProgress | null;
package/dist/functions/helpers/get-encoding-metadata.js DELETED
@@ -1,15 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getEncodingMetadata = void 0;
- const chunk_progress_1 = require("../../shared/chunk-progress");
- const getEncodingMetadata = ({ exists, frameCount, stepSize, }) => {
- if (!exists) {
- return null;
- }
- const framesEncoded = (0, chunk_progress_1.getProgressOfChunk)(exists.ETag);
- // We only report every 100 frames encoded so that we are able to report up to 2000 * 100 ETags => 200000 frames
- return {
- framesEncoded: Math.min(frameCount, framesEncoded * stepSize),
- };
- };
- exports.getEncodingMetadata = getEncodingMetadata;
package/dist/functions/helpers/get-encoding-progress-step-size.d.ts DELETED
@@ -1 +0,0 @@
- export declare const getEncodingProgressStepSize: (totalFrames: number) => number;
package/dist/functions/helpers/get-encoding-progress-step-size.js DELETED
@@ -1,7 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getEncodingProgressStepSize = void 0;
- const getEncodingProgressStepSize = (totalFrames) => {
- return Math.min(100, Math.max(5, totalFrames / 10));
- };
- exports.getEncodingProgressStepSize = getEncodingProgressStepSize;
package/dist/functions/helpers/get-files-to-delete.d.ts DELETED
@@ -1,10 +0,0 @@
- export type CleanupJob = {
- name: string;
- type: 'exact' | 'prefix';
- };
- export declare const getFilesToDelete: ({ chunkCount, renderId, hasVideo, hasAudio, }: {
- chunkCount: number;
- renderId: string;
- hasVideo: boolean;
- hasAudio: boolean;
- }) => CleanupJob[];