@remotion/lambda 4.0.200 → 4.0.202

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. package/dist/api/bucket-exists.js +6 -2
  2. package/dist/api/clean-items.d.ts +2 -1
  3. package/dist/api/clean-items.js +2 -1
  4. package/dist/api/create-bucket.js +5 -5
  5. package/dist/api/delete-render.d.ts +2 -0
  6. package/dist/api/delete-render.js +6 -0
  7. package/dist/api/delete-site.d.ts +11 -5
  8. package/dist/api/delete-site.js +7 -1
  9. package/dist/api/deploy-site.d.ts +7 -3
  10. package/dist/api/deploy-site.js +17 -4
  11. package/dist/api/download-media.d.ts +2 -0
  12. package/dist/api/download-media.js +4 -1
  13. package/dist/api/get-aws-client.d.ts +2 -1
  14. package/dist/api/get-aws-client.js +2 -1
  15. package/dist/api/get-buckets.js +11 -3
  16. package/dist/api/get-compositions-on-lambda.d.ts +2 -1
  17. package/dist/api/get-compositions-on-lambda.js +3 -1
  18. package/dist/api/get-or-create-bucket.js +1 -0
  19. package/dist/api/get-render-progress.d.ts +1 -0
  20. package/dist/api/get-sites.d.ts +9 -4
  21. package/dist/api/get-sites.js +15 -5
  22. package/dist/api/make-lambda-payload.d.ts +4 -3
  23. package/dist/api/make-lambda-payload.js +9 -4
  24. package/dist/api/presign-url.d.ts +8 -3
  25. package/dist/api/presign-url.js +26 -12
  26. package/dist/api/render-media-on-lambda.d.ts +1 -0
  27. package/dist/api/render-media-on-lambda.js +2 -1
  28. package/dist/api/render-still-on-lambda.d.ts +1 -0
  29. package/dist/api/render-still-on-lambda.js +2 -1
  30. package/dist/api/upload-dir.d.ts +2 -1
  31. package/dist/api/upload-dir.js +2 -2
  32. package/dist/cli/args.d.ts +1 -0
  33. package/dist/cli/commands/render/render.js +3 -1
  34. package/dist/cli/commands/sites/create.js +22 -2
  35. package/dist/cli/commands/sites/rm.js +1 -0
  36. package/dist/cli/commands/sites/rmall.js +1 -0
  37. package/dist/cli/commands/still.d.ts +6 -1
  38. package/dist/cli/commands/still.js +2 -1
  39. package/dist/cli/helpers/progress-bar.d.ts +5 -0
  40. package/dist/cli/helpers/progress-bar.js +19 -1
  41. package/dist/cli/index.js +6 -1
  42. package/dist/functions/chunk-optimization/plan-frame-ranges.d.ts +1 -4
  43. package/dist/functions/helpers/check-if-render-exists.d.ts +3 -0
  44. package/dist/functions/helpers/check-if-render-exists.js +15 -0
  45. package/dist/functions/helpers/cleanup-props.d.ts +6 -2
  46. package/dist/functions/helpers/cleanup-props.js +3 -1
  47. package/dist/functions/helpers/delete-chunks.d.ts +9 -0
  48. package/dist/functions/helpers/delete-chunks.js +25 -0
  49. package/dist/functions/helpers/expected-out-name.d.ts +1 -1
  50. package/dist/functions/helpers/find-output-file-in-bucket.d.ts +2 -1
  51. package/dist/functions/helpers/find-output-file-in-bucket.js +2 -1
  52. package/dist/functions/helpers/get-browser-instance.d.ts +2 -3
  53. package/dist/functions/helpers/get-browser-instance.js +4 -3
  54. package/dist/functions/helpers/get-cleanup-progress.d.ts +10 -0
  55. package/dist/functions/helpers/get-cleanup-progress.js +35 -0
  56. package/dist/functions/helpers/get-current-region.d.ts +1 -1
  57. package/dist/functions/helpers/get-custom-out-name.d.ts +1 -1
  58. package/dist/functions/helpers/get-encoding-metadata.d.ts +7 -0
  59. package/dist/functions/helpers/get-encoding-metadata.js +15 -0
  60. package/dist/functions/helpers/get-encoding-progress-step-size.d.ts +1 -0
  61. package/dist/functions/helpers/get-encoding-progress-step-size.js +7 -0
  62. package/dist/functions/helpers/get-files-to-delete.d.ts +10 -0
  63. package/dist/functions/helpers/get-files-to-delete.js +52 -0
  64. package/dist/functions/helpers/get-final-encoding-status.d.ts +6 -0
  65. package/dist/functions/helpers/get-final-encoding-status.js +18 -0
  66. package/dist/functions/helpers/get-folder-size.d.ts +1 -0
  67. package/dist/functions/helpers/get-folder-size.js +8 -0
  68. package/dist/functions/helpers/get-lambdas-invoked-stats.d.ts +8 -0
  69. package/dist/functions/helpers/get-lambdas-invoked-stats.js +14 -0
  70. package/dist/functions/helpers/get-overall-progress-s3.d.ts +2 -1
  71. package/dist/functions/helpers/get-overall-progress-s3.js +2 -1
  72. package/dist/functions/helpers/get-post-render-data.d.ts +8 -0
  73. package/dist/functions/helpers/get-post-render-data.js +22 -0
  74. package/dist/functions/helpers/get-progress.d.ts +2 -1
  75. package/dist/functions/helpers/get-progress.js +2 -1
  76. package/dist/functions/helpers/get-render-metadata.d.ts +8 -0
  77. package/dist/functions/helpers/get-render-metadata.js +17 -0
  78. package/dist/functions/helpers/get-rendered-frames-progress.d.ts +8 -0
  79. package/dist/functions/helpers/get-rendered-frames-progress.js +37 -0
  80. package/dist/functions/helpers/get-time-to-finish.d.ts +5 -0
  81. package/dist/functions/helpers/get-time-to-finish.js +13 -0
  82. package/dist/functions/helpers/io.d.ts +33 -3
  83. package/dist/functions/helpers/io.js +104 -3
  84. package/dist/functions/helpers/merge-chunks.d.ts +1 -0
  85. package/dist/functions/helpers/merge-chunks.js +2 -0
  86. package/dist/functions/helpers/overall-render-progress.d.ts +2 -1
  87. package/dist/functions/helpers/overall-render-progress.js +2 -1
  88. package/dist/functions/helpers/print-cloudwatch-helper.d.ts +1 -1
  89. package/dist/functions/helpers/print-cloudwatch-helper.js +3 -3
  90. package/dist/functions/helpers/read-with-progress.d.ts +2 -1
  91. package/dist/functions/helpers/read-with-progress.js +2 -2
  92. package/dist/functions/helpers/streaming-payloads.d.ts +19 -0
  93. package/dist/functions/helpers/streaming-payloads.js +25 -0
  94. package/dist/functions/helpers/write-lambda-error.d.ts +3 -2
  95. package/dist/functions/helpers/write-lambda-error.js +2 -3
  96. package/dist/functions/helpers/write-post-render-data.d.ts +9 -0
  97. package/dist/functions/helpers/write-post-render-data.js +18 -0
  98. package/dist/functions/launch.js +8 -0
  99. package/dist/functions/merge.d.ts +9 -0
  100. package/dist/functions/merge.js +61 -0
  101. package/dist/functions/progress.js +1 -0
  102. package/dist/functions/renderer.js +2 -0
  103. package/dist/functions/start.js +3 -0
  104. package/dist/functions/still.js +6 -0
  105. package/dist/index.d.ts +2 -2
  106. package/dist/internals.d.ts +12 -9
  107. package/dist/io/delete-file.js +2 -2
  108. package/dist/io/head-file.js +6 -2
  109. package/dist/io/list-objects.js +12 -3
  110. package/dist/io/read-file.d.ts +2 -1
  111. package/dist/io/read-file.js +6 -2
  112. package/dist/io/write-file.js +6 -2
  113. package/dist/shared/__mocks__/read-dir.js +2 -2
  114. package/dist/shared/aws-clients.js +15 -2
  115. package/dist/shared/chunk-progress.d.ts +9 -0
  116. package/dist/shared/chunk-progress.js +2034 -0
  117. package/dist/shared/cleanup-serialized-input-props.d.ts +4 -2
  118. package/dist/shared/cleanup-serialized-input-props.js +4 -2
  119. package/dist/shared/compress-props.d.ts +6 -8
  120. package/dist/shared/compress-props.js +14 -11
  121. package/dist/shared/get-etag.d.ts +1 -1
  122. package/dist/shared/get-etag.js +33 -12
  123. package/dist/shared/get-s3-client.d.ts +5 -1
  124. package/dist/shared/get-s3-client.js +2 -1
  125. package/dist/shared/get-s3-operations.d.ts +2 -1
  126. package/dist/shared/get-s3-operations.js +38 -20
  127. package/dist/shared/get-service-client.d.ts +2 -1
  128. package/dist/shared/get-service-client.js +4 -2
  129. package/dist/shared/lifecycle-rules.d.ts +2 -1
  130. package/dist/shared/lifecycle-rules.js +17 -7
  131. package/dist/shared/parse-chunk-key.d.ts +5 -0
  132. package/dist/shared/parse-chunk-key.js +15 -0
  133. package/dist/shared/parse-lambda-initialized-key.d.ts +5 -0
  134. package/dist/shared/parse-lambda-initialized-key.js +15 -0
  135. package/dist/shared/read-dir.d.ts +6 -5
  136. package/dist/shared/read-dir.js +9 -6
  137. package/dist/shared/serialize-props.d.ts +14 -0
  138. package/dist/shared/serialize-props.js +36 -0
  139. package/package.json +11 -11
  140. package/remotionlambda-arm64.zip +0 -0
  141. package/dist/functions/provider-implementation.d.ts +0 -0
  142. package/dist/functions/provider-implementation.js +0 -1
package/dist/shared/cleanup-serialized-input-props.d.ts CHANGED
@@ -1,12 +1,14 @@
  import type { CloudProvider, ProviderSpecifics } from '@remotion/serverless';
  import { type SerializedInputProps } from '@remotion/serverless/client';
- export declare const cleanupSerializedInputProps: <Provider extends CloudProvider<string, Record<string, unknown>>>({ serialized, region, providerSpecifics, }: {
+ export declare const cleanupSerializedInputProps: <Provider extends CloudProvider<string, Record<string, unknown>>>({ serialized, region, providerSpecifics, forcePathStyle, }: {
  serialized: SerializedInputProps;
  region: Provider["region"];
  providerSpecifics: ProviderSpecifics<Provider>;
+ forcePathStyle: boolean;
  }) => Promise<number>;
- export declare const cleanupSerializedResolvedProps: <Provider extends CloudProvider<string, Record<string, unknown>>>({ serialized, region, providerSpecifics, }: {
+ export declare const cleanupSerializedResolvedProps: <Provider extends CloudProvider<string, Record<string, unknown>>>({ serialized, region, providerSpecifics, forcePathStyle, }: {
  serialized: SerializedInputProps;
  region: Provider["region"];
  providerSpecifics: ProviderSpecifics<Provider>;
+ forcePathStyle: boolean;
  }) => Promise<number>;
package/dist/shared/cleanup-serialized-input-props.js CHANGED
@@ -2,7 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.cleanupSerializedResolvedProps = exports.cleanupSerializedInputProps = void 0;
  const client_1 = require("@remotion/serverless/client");
- const cleanupSerializedInputProps = async ({ serialized, region, providerSpecifics, }) => {
+ const cleanupSerializedInputProps = async ({ serialized, region, providerSpecifics, forcePathStyle, }) => {
  if (serialized.type === 'payload') {
  return 0;
  }
@@ -12,11 +12,12 @@ const cleanupSerializedInputProps = async ({ serialized, region, providerSpecifi
  key: (0, client_1.inputPropsKey)(serialized.hash),
  region,
  customCredentials: null,
+ forcePathStyle,
  });
  return Date.now() - time;
  };
  exports.cleanupSerializedInputProps = cleanupSerializedInputProps;
- const cleanupSerializedResolvedProps = async ({ serialized, region, providerSpecifics, }) => {
+ const cleanupSerializedResolvedProps = async ({ serialized, region, providerSpecifics, forcePathStyle, }) => {
  if (serialized.type === 'payload') {
  return 0;
  }
@@ -26,6 +27,7 @@ const cleanupSerializedResolvedProps = async ({ serialized, region, providerSpec
  key: (0, client_1.resolvedPropsKey)(serialized.hash),
  region,
  customCredentials: null,
+ forcePathStyle,
  });
  return Date.now() - time;
  };
package/dist/shared/compress-props.d.ts CHANGED
@@ -1,22 +1,20 @@
- import type { ProviderSpecifics } from '@remotion/serverless';
- import { type SerializedInputProps } from '@remotion/serverless/client';
+ import type { SerializedInputProps } from '@remotion/serverless/client';
+ import type { AwsRegion } from '../client';
  type PropsType = 'input-props' | 'resolved-props';
  export declare const serializeOrThrow: (inputProps: Record<string, unknown>, propsType: PropsType) => string;
  export declare const getNeedsToUpload: (type: 'still' | 'video-or-audio', sizes: number[]) => boolean;
- export declare const compressInputProps: <Region extends string>({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, }: {
+ export declare const compressInputProps: ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, }: {
  stringifiedInputProps: string;
- region: Region;
+ region: AwsRegion;
  userSpecifiedBucketName: string | null;
  propsType: PropsType;
  needsToUpload: boolean;
- providerSpecifics: ProviderSpecifics<Region>;
  }) => Promise<SerializedInputProps>;
- export declare const decompressInputProps: <Region extends string>({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, }: {
+ export declare const decompressInputProps: ({ serialized, region, bucketName, expectedBucketOwner, propsType, }: {
  serialized: SerializedInputProps;
- region: Region;
+ region: AwsRegion;
  bucketName: string;
  expectedBucketOwner: string;
  propsType: PropsType;
- providerSpecifics: ProviderSpecifics<Region>;
  }) => Promise<string>;
  export {};
package/dist/shared/compress-props.js CHANGED
@@ -1,14 +1,18 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.decompressInputProps = exports.compressInputProps = exports.getNeedsToUpload = exports.serializeOrThrow = void 0;
- const client_1 = require("@remotion/serverless/client");
  const no_react_1 = require("remotion/no-react");
+ const get_or_create_bucket_1 = require("../api/get-or-create-bucket");
+ const io_1 = require("../functions/helpers/io");
+ const constants_1 = require("./constants");
+ const random_hash_1 = require("./random-hash");
+ const stream_to_string_1 = require("./stream-to-string");
  const validate_webhook_1 = require("./validate-webhook");
  const makeKey = (type, hash) => {
  if (type === 'input-props') {
- return (0, client_1.inputPropsKey)(hash);
+ return (0, constants_1.inputPropsKey)(hash);
  }
- return (0, client_1.resolvedPropsKey)(hash);
+ return (0, constants_1.resolvedPropsKey)(hash);
  };
  const serializeOrThrow = (inputProps, propsType) => {
  try {
@@ -35,16 +39,15 @@ const getNeedsToUpload = (type, sizes) => {
  return false;
  };
  exports.getNeedsToUpload = getNeedsToUpload;
- const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, }) => {
- const hash = providerSpecifics.randomHash();
+ const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, }) => {
+ const hash = (0, random_hash_1.randomHash)();
  if (needsToUpload) {
- const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, client_1.internalGetOrCreateBucket)({
+ const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, get_or_create_bucket_1.internalGetOrCreateBucket)({
  region,
  enableFolderExpiry: null,
  customCredentials: null,
- providerSpecifics,
  })).bucketName;
- await providerSpecifics.writeFile({
+ await (0, io_1.lambdaWriteFile)({
  body: stringifiedInputProps,
  bucketName,
  region,
@@ -66,18 +69,18 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
  };
  };
  exports.compressInputProps = compressInputProps;
- const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, }) => {
+ const decompressInputProps = async ({ serialized, region, bucketName, expectedBucketOwner, propsType, }) => {
  if (serialized.type === 'payload') {
  return serialized.payload;
  }
  try {
- const response = await providerSpecifics.readFile({
+ const response = await (0, io_1.lambdaReadFile)({
  bucketName,
  expectedBucketOwner,
  key: makeKey(propsType, serialized.hash),
  region,
  });
- const body = await (0, client_1.streamToString)(response);
+ const body = await (0, stream_to_string_1.streamToString)(response);
  const payload = body;
  return payload;
  }
package/dist/shared/get-etag.d.ts CHANGED
@@ -1 +1 @@
- export declare const getEtagOfFile: (filePath: string) => Promise<string>;
+ export declare const getEtagOfFile: (filePath: string, onProgress: (bytes: number) => void) => () => Promise<string>;
package/dist/shared/get-etag.js CHANGED
@@ -8,17 +8,38 @@ const node_crypto_1 = __importDefault(require("node:crypto"));
  const node_fs_1 = __importDefault(require("node:fs"));
  const chunk = 1024 * 1024 * 5; // 5MB
  const md5 = (data) => node_crypto_1.default.createHash('md5').update(data).digest('hex');
- const getEtagOfFile = async (filePath) => {
- const stream = await node_fs_1.default.promises.readFile(filePath);
- if (stream.length <= chunk) {
- return `"${md5(stream)}"`;
- }
- const md5Chunks = [];
- const chunksNumber = Math.ceil(stream.length / chunk);
- for (let i = 0; i < chunksNumber; i++) {
- const chunkStream = stream.slice(i * chunk, (i + 1) * chunk);
- md5Chunks.push(md5(chunkStream));
- }
- return `"${md5(Buffer.from(md5Chunks.join(''), 'hex'))}-${chunksNumber}"`;
+ const getEtagOfFile = (filePath, onProgress) => {
+ const calc = async () => {
+ const size = await node_fs_1.default.promises.stat(filePath).then((s) => s.size);
+ if (size <= chunk) {
+ const buffer = await node_fs_1.default.promises.readFile(filePath);
+ return `"${md5(buffer)}"`;
+ }
+ const stream = node_fs_1.default.createReadStream(filePath, {
+ highWaterMark: chunk,
+ });
+ const md5Chunks = [];
+ const chunksNumber = Math.ceil(size / chunk);
+ return new Promise((resolve, reject) => {
+ stream.on('data', (c) => {
+ md5Chunks.push(md5(c));
+ onProgress(c.length);
+ });
+ stream.on('end', () => {
+ resolve(`"${md5(Buffer.from(md5Chunks.join(''), 'hex'))}-${chunksNumber}"`);
+ });
+ stream.on('error', (err) => {
+ reject(err);
+ });
+ });
+ };
+ let tag = null;
+ return async () => {
+ if (tag !== null) {
+ return tag;
+ }
+ tag = await calc();
+ return tag;
+ };
  };
  exports.getEtagOfFile = getEtagOfFile;
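The rewritten getEtagOfFile no longer returns a Promise<string> directly: it accepts an onProgress callback and returns a memoized async thunk, so the file is hashed lazily in 5 MB chunks (matching S3's multipart ETag scheme) and only on first use. A minimal consumption sketch, assuming the internal dist/shared/get-etag module can be deep-imported (it is not a documented entry point):

// Sketch only — getEtagOfFile is an internal helper of @remotion/lambda.
import {getEtagOfFile} from '@remotion/lambda/dist/shared/get-etag';

let hashedBytes = 0;
const getTag = getEtagOfFile('./bundle/bundle.js', (bytes) => {
  // Called for each streamed chunk with its size in bytes
  hashedBytes += bytes;
});

// The file is only read when the thunk is first awaited; later calls
// return the cached ETag without re-reading the file.
const etag = await getTag();
const cached = await getTag();
console.log(etag === cached, hashedBytes);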
package/dist/shared/get-s3-client.d.ts CHANGED
@@ -2,4 +2,8 @@ import type { S3Client } from '@aws-sdk/client-s3';
  import type { CustomCredentials } from '../client';
  import type { AwsProvider } from '../functions/aws-implementation';
  import type { AwsRegion } from '../regions';
- export declare const getS3Client: (region: AwsRegion, customCredentials: CustomCredentials<AwsProvider> | null) => S3Client;
+ export declare const getS3Client: ({ region, customCredentials, forcePathStyle, }: {
+ region: AwsRegion;
+ customCredentials: CustomCredentials<AwsProvider> | null;
+ forcePathStyle: boolean;
+ }) => S3Client;
package/dist/shared/get-s3-client.js CHANGED
@@ -2,12 +2,13 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.getS3Client = void 0;
  const get_service_client_1 = require("./get-service-client");
- const getS3Client = (region, customCredentials) => {
+ const getS3Client = ({ region, customCredentials, forcePathStyle, }) => {
  var _a;
  return (0, get_service_client_1.getServiceClient)({
  region: (_a = customCredentials === null || customCredentials === void 0 ? void 0 : customCredentials.region) !== null && _a !== void 0 ? _a : region,
  service: 's3',
  customCredentials,
+ forcePathStyle,
  });
  };
  exports.getS3Client = getS3Client;
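The forcePathStyle flag now threaded through getS3Client, getServiceClient and the lifecycle helpers corresponds to the standard forcePathStyle option of @aws-sdk/client-s3. For context, a standalone sketch of what that option changes (the endpoint and bucket name are placeholders, not values taken from this package):

import {ListObjectsV2Command, S3Client} from '@aws-sdk/client-s3';

// With forcePathStyle: true the client addresses buckets as
// https://<endpoint>/<bucket>/<key> instead of https://<bucket>.<endpoint>/<key>,
// which S3-compatible storage such as a local MinIO typically requires.
const client = new S3Client({
  region: 'eu-central-1',
  endpoint: 'http://localhost:9000', // placeholder S3-compatible endpoint
  forcePathStyle: true,
});

const result = await client.send(
  new ListObjectsV2Command({Bucket: 'remotionlambda-example'}),
);
console.log(result.Contents?.length ?? 0);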
package/dist/shared/get-s3-operations.d.ts CHANGED
@@ -1,8 +1,9 @@
  import type { _Object } from '@aws-sdk/client-s3';
- export declare const getS3DiffOperations: ({ objects, bundle, prefix, }: {
+ export declare const getS3DiffOperations: ({ objects, bundle, prefix, onProgress, }: {
  objects: _Object[];
  bundle: string;
  prefix: string;
+ onProgress: (bytes: number) => void;
  }) => Promise<{
  toDelete: _Object[];
  toUpload: string[];
package/dist/shared/get-s3-operations.js CHANGED
@@ -2,31 +2,49 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.getS3DiffOperations = void 0;
  const read_dir_1 = require("./read-dir");
- const getS3DiffOperations = async ({ objects, bundle, prefix, }) => {
- const dir = await (0, read_dir_1.readDirectory)({
+ const getS3DiffOperations = async ({ objects, bundle, prefix, onProgress, }) => {
+ var _a, _b, _c;
+ let totalBytes = 0;
+ const dir = (0, read_dir_1.readDirectory)({
  dir: bundle,
  etags: {},
  originalDir: bundle,
+ onProgress: (bytes) => {
+ totalBytes += bytes;
+ onProgress(totalBytes);
+ },
  });
- const filesOnS3ButNotLocal = objects.filter((fileOnS3) => {
- var _a;
+ const filesOnS3ButNotLocal = [];
+ for (const fileOnS3 of objects) {
  const key = (_a = fileOnS3.Key) === null || _a === void 0 ? void 0 : _a.substring(prefix.length + 1);
- return !dir[key];
- });
- const localFilesNotOnS3 = Object.keys(dir).filter((d) => {
- return !objects.find((o) => {
- var _a;
- const key = (_a = o.Key) === null || _a === void 0 ? void 0 : _a.substring(prefix.length + 1);
- return key === d && o.ETag === dir[d];
- });
- });
- const existing = Object.keys(dir).filter((d) => {
- return objects.find((o) => {
- var _a;
- const key = (_a = o.Key) === null || _a === void 0 ? void 0 : _a.substring(prefix.length + 1);
- return key === d && o.ETag === dir[d];
- });
- });
+ if (!dir[key]) {
+ filesOnS3ButNotLocal.push(fileOnS3);
+ }
+ }
+ const localFilesNotOnS3 = [];
+ for (const d of Object.keys(dir)) {
+ let found;
+ for (const o of objects) {
+ const key = (_b = o.Key) === null || _b === void 0 ? void 0 : _b.substring(prefix.length + 1);
+ if (key === d && o.ETag === (await dir[d]())) {
+ found = o;
+ break;
+ }
+ }
+ if (!found) {
+ localFilesNotOnS3.push(d);
+ }
+ }
+ const existing = [];
+ for (const d of Object.keys(dir)) {
+ for (const o of objects) {
+ const key = (_c = o.Key) === null || _c === void 0 ? void 0 : _c.substring(prefix.length + 1);
+ if (key === d && o.ETag === (await dir[d]())) {
+ existing.push(d);
+ break;
+ }
+ }
+ }
  return {
  toDelete: filesOnS3ButNotLocal,
  toUpload: localFilesNotOnS3,
package/dist/shared/get-service-client.d.ts CHANGED
@@ -15,8 +15,9 @@ export type ServiceMapping = {
  servicequotas: ServiceQuotasClient;
  sts: STSClient;
  };
- export declare const getServiceClient: <T extends keyof ServiceMapping>({ region, service, customCredentials, }: {
+ export declare const getServiceClient: <T extends keyof ServiceMapping>({ region, service, customCredentials, forcePathStyle, }: {
  region: AwsRegion;
  service: T;
  customCredentials: CustomCredentials<AwsProvider> | null;
+ forcePathStyle: boolean;
  }) => ServiceMapping[T];
package/dist/shared/get-service-client.js CHANGED
@@ -11,7 +11,7 @@ const no_react_1 = require("remotion/no-react");
  const check_credentials_1 = require("./check-credentials");
  const constants_1 = require("./constants");
  const get_credentials_1 = require("./get-credentials");
- const getCredentialsHash = ({ customCredentials, region, service, }) => {
+ const getCredentialsHash = ({ customCredentials, region, service, forcePathStyle, }) => {
  const hashComponents = {};
  if (process.env.REMOTION_SKIP_AWS_CREDENTIALS_CHECK) {
  hashComponents.credentials = {
@@ -45,10 +45,11 @@ const getCredentialsHash = ({ customCredentials, region, service, }) => {
  hashComponents.customCredentials = customCredentials;
  hashComponents.region = region;
  hashComponents.service = service;
+ hashComponents.forcePathStyle = forcePathStyle;
  return (0, no_react_1.random)(JSON.stringify(hashComponents)).toString().replace('0.', '');
  };
  const _clients = {};
- const getServiceClient = ({ region, service, customCredentials, }) => {
+ const getServiceClient = ({ region, service, customCredentials, forcePathStyle, }) => {
  var _a;
  const Client = (() => {
  if (service === 'cloudwatch') {
@@ -75,6 +76,7 @@ const getServiceClient = ({ region, service, customCredentials, }) => {
  region,
  customCredentials,
  service,
+ forcePathStyle,
  });
  if (!_clients[key]) {
  (0, check_credentials_1.checkCredentials)();
package/dist/shared/lifecycle-rules.d.ts CHANGED
@@ -1,9 +1,10 @@
  import type { CustomCredentials } from '@remotion/serverless/client';
  import type { AwsProvider } from '../functions/aws-implementation';
  import type { AwsRegion } from '../regions';
- export declare const applyLifeCyleOperation: ({ enableFolderExpiry, bucketName, region, customCredentials, }: {
+ export declare const applyLifeCyleOperation: ({ enableFolderExpiry, bucketName, region, customCredentials, forcePathStyle, }: {
  enableFolderExpiry: boolean | null;
  bucketName: string;
  region: AwsRegion;
  customCredentials: CustomCredentials<AwsProvider> | null;
+ forcePathStyle: boolean;
  }) => Promise<void>;
package/dist/shared/lifecycle-rules.js CHANGED
@@ -5,7 +5,7 @@ const client_s3_1 = require("@aws-sdk/client-s3");
  const apply_lifecyle_1 = require("../functions/helpers/apply-lifecyle");
  const lifecycle_1 = require("../functions/helpers/lifecycle");
  const get_s3_client_1 = require("./get-s3-client");
- const createLCRules = async ({ bucketName, region, customCredentials, }) => {
+ const createLCRules = async ({ bucketName, region, customCredentials, forcePathStyle, }) => {
  var _a;
  const lcRules = (0, lifecycle_1.getLifeCycleRules)();
  // create the lifecyle rules
@@ -15,7 +15,7 @@ const createLCRules = async ({ bucketName, region, customCredentials, }) => {
  });
  const createCommand = new client_s3_1.PutBucketLifecycleConfigurationCommand(createCommandInput);
  try {
- await (0, get_s3_client_1.getS3Client)(region, customCredentials).send(createCommand);
+ await (0, get_s3_client_1.getS3Client)({ region, customCredentials, forcePathStyle }).send(createCommand);
  }
  catch (err) {
  if ((_a = err.stack) === null || _a === void 0 ? void 0 : _a.includes('AccessDenied')) {
@@ -23,13 +23,13 @@ const createLCRules = async ({ bucketName, region, customCredentials, }) => {
  }
  }
  };
- const deleteLCRules = async ({ bucketName, region, customCredentials, }) => {
+ const deleteLCRules = async ({ bucketName, region, customCredentials, forcePathStyle, }) => {
  var _a;
  const deleteCommandInput = (0, apply_lifecyle_1.deleteLifeCycleInput)({
  bucketName,
  });
  try {
- await (0, get_s3_client_1.getS3Client)(region, customCredentials).send(new client_s3_1.DeleteBucketLifecycleCommand(deleteCommandInput));
+ await (0, get_s3_client_1.getS3Client)({ region, customCredentials, forcePathStyle }).send(new client_s3_1.DeleteBucketLifecycleCommand(deleteCommandInput));
  }
  catch (err) {
  if ((_a = err.stack) === null || _a === void 0 ? void 0 : _a.includes('AccessDenied')) {
@@ -37,15 +37,25 @@ const deleteLCRules = async ({ bucketName, region, customCredentials, }) => {
  }
  }
  };
- const applyLifeCyleOperation = async ({ enableFolderExpiry, bucketName, region, customCredentials, }) => {
+ const applyLifeCyleOperation = async ({ enableFolderExpiry, bucketName, region, customCredentials, forcePathStyle, }) => {
  if (enableFolderExpiry === null) {
  return;
  }
  if (enableFolderExpiry === true) {
- await createLCRules({ bucketName, region, customCredentials });
+ await createLCRules({
+ bucketName,
+ region,
+ customCredentials,
+ forcePathStyle,
+ });
  }
  else {
- await deleteLCRules({ bucketName, region, customCredentials });
+ await deleteLCRules({
+ bucketName,
+ region,
+ customCredentials,
+ forcePathStyle,
+ });
  }
  };
  exports.applyLifeCyleOperation = applyLifeCyleOperation;
package/dist/shared/parse-chunk-key.d.ts CHANGED
@@ -0,0 +1,5 @@
+ export declare const parseLambdaChunkKey: (key: string) => {
+ renderId: string;
+ chunk: number;
+ type: string;
+ };
package/dist/shared/parse-chunk-key.js CHANGED
@@ -0,0 +1,15 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseLambdaChunkKey = void 0;
+ const parseLambdaChunkKey = (key) => {
+ const match = key.match(/^renders\/(.*)\/chunks\/chunk:([0-9]+):(video|audio)$/);
+ if (!match) {
+ throw new Error(`Cannot parse filename ${key} into timing information. Malformed data.`);
+ }
+ return {
+ renderId: match[1],
+ chunk: Number(match[2]),
+ type: match[3],
+ };
+ };
+ exports.parseLambdaChunkKey = parseLambdaChunkKey;
package/dist/shared/parse-lambda-initialized-key.d.ts CHANGED
@@ -0,0 +1,5 @@
+ export declare const parseLambdaInitializedKey: (key: string) => {
+ renderId: string;
+ chunk: number;
+ attempt: number;
+ };
package/dist/shared/parse-lambda-initialized-key.js CHANGED
@@ -0,0 +1,15 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseLambdaInitializedKey = void 0;
+ const parseLambdaInitializedKey = (key) => {
+ const match = key.match(/^renders\/(.*)\/lambda-initialized-chunk:([0-9]+)-attempt:([0-9]+).txt$/);
+ if (!match) {
+ throw new Error(`Cannot parse filename ${key} into timing information. Malformed data.`);
+ }
+ return {
+ renderId: match[1],
+ chunk: Number(match[2]),
+ attempt: Number(match[3]),
+ };
+ };
+ exports.parseLambdaInitializedKey = parseLambdaInitializedKey;
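Both new parsers recover render metadata from S3 key names; the regular expressions above define the expected key shapes. Illustrative inputs and outputs with a made-up render ID (the deep import paths are assumptions, as these are internal modules):

import {parseLambdaChunkKey} from '@remotion/lambda/dist/shared/parse-chunk-key';
import {parseLambdaInitializedKey} from '@remotion/lambda/dist/shared/parse-lambda-initialized-key';

parseLambdaChunkKey('renders/abcdef1234/chunks/chunk:00042:video');
// -> {renderId: 'abcdef1234', chunk: 42, type: 'video'}

parseLambdaInitializedKey('renders/abcdef1234/lambda-initialized-chunk:7-attempt:2.txt');
// -> {renderId: 'abcdef1234', chunk: 7, attempt: 2}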
package/dist/shared/read-dir.d.ts CHANGED
@@ -1,9 +1,10 @@
- export declare function readDirectory({ dir, etags, originalDir, }: {
+ export declare function readDirectory({ dir, etags, originalDir, onProgress, }: {
  dir: string;
  etags: {
- [key: string]: string;
+ [key: string]: () => Promise<string>;
  };
  originalDir: string;
- }): Promise<{
- [key: string]: string;
- }>;
+ onProgress: (bytes: number) => void;
+ }): {
+ [key: string]: () => Promise<string>;
+ };
package/dist/shared/read-dir.js CHANGED
@@ -29,7 +29,7 @@ const path = __importStar(require("node:path"));
  const get_etag_1 = require("./get-etag");
  // Function to recursively read a directory and return a list of files
  // with their etags and file names
- async function readDirectory({ dir, etags, originalDir, }) {
+ function readDirectory({ dir, etags, originalDir, onProgress, }) {
  const files = fs.readdirSync(dir);
  for (const file of files) {
  if (file.startsWith('.DS_Store'))
@@ -38,19 +38,22 @@ async function readDirectory({ dir, etags, originalDir, }) {
  if (fs.lstatSync(filePath).isDirectory()) {
  etags = {
  ...etags,
- ...(await readDirectory({ dir: filePath, etags, originalDir })),
+ ...readDirectory({
+ dir: filePath,
+ etags,
+ originalDir,
+ onProgress,
+ }),
  };
  continue;
  }
  // eslint-disable-next-line no-lonely-if
  if (fs.lstatSync(filePath).isSymbolicLink()) {
  const realPath = fs.realpathSync(filePath);
- etags[path.relative(originalDir, filePath)] =
- await (0, get_etag_1.getEtagOfFile)(realPath);
+ etags[path.relative(originalDir, filePath)] = (0, get_etag_1.getEtagOfFile)(realPath, onProgress);
  }
  else {
- etags[path.relative(originalDir, filePath)] =
- await (0, get_etag_1.getEtagOfFile)(filePath);
+ etags[path.relative(originalDir, filePath)] = (0, get_etag_1.getEtagOfFile)(filePath, onProgress);
  }
  }
  // Return the list of files with their etags and file names
package/dist/shared/serialize-props.d.ts CHANGED
@@ -0,0 +1,14 @@
+ type SerializedJSONWithCustomFields = {
+ serializedString: string;
+ customDateUsed: boolean;
+ customFileUsed: boolean;
+ mapUsed: boolean;
+ setUsed: boolean;
+ };
+ export declare const FILE_TOKEN = "remotion-file:";
+ export declare const serializeJSONWithDate: ({ data, indent, staticBase, }: {
+ data: Record<string, unknown>;
+ indent: number | undefined;
+ staticBase: string | null;
+ }) => SerializedJSONWithCustomFields;
+ export {};
package/dist/shared/serialize-props.js CHANGED
@@ -0,0 +1,36 @@
+ "use strict";
+ // Must keep this file in sync with the one in packages/core/src/input-props-serialization.ts!
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.serializeJSONWithDate = exports.FILE_TOKEN = void 0;
+ const DATE_TOKEN = 'remotion-date:';
+ exports.FILE_TOKEN = 'remotion-file:';
+ const serializeJSONWithDate = ({ data, indent, staticBase, }) => {
+ let customDateUsed = false;
+ let customFileUsed = false;
+ let mapUsed = false;
+ let setUsed = false;
+ const serializedString = JSON.stringify(data, function (key, value) {
+ const item = this[key];
+ if (item instanceof Date) {
+ customDateUsed = true;
+ return `${DATE_TOKEN}${item.toISOString()}`;
+ }
+ if (item instanceof Map) {
+ mapUsed = true;
+ return value;
+ }
+ if (item instanceof Set) {
+ setUsed = true;
+ return value;
+ }
+ if (typeof item === 'string' &&
+ staticBase !== null &&
+ item.startsWith(staticBase)) {
+ customFileUsed = true;
+ return `${exports.FILE_TOKEN}${item.replace(staticBase + '/', '')}`;
+ }
+ return value;
+ }, indent);
+ return { serializedString, customDateUsed, customFileUsed, mapUsed, setUsed };
+ };
+ exports.serializeJSONWithDate = serializeJSONWithDate;
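serializeJSONWithDate wraps JSON.stringify with a replacer that tags Date values (and paths under staticBase) with string tokens and records whether a Map or Set was encountered. A quick behavioral sketch with made-up data, assuming the internal dist/shared/serialize-props module can be imported:

import {serializeJSONWithDate} from '@remotion/lambda/dist/shared/serialize-props';

const {serializedString, customDateUsed, mapUsed} = serializeJSONWithDate({
  data: {
    title: 'Launch video',
    premiere: new Date('2024-05-01T00:00:00.000Z'),
  },
  indent: 2,
  staticBase: null,
});

// serializedString now contains "remotion-date:2024-05-01T00:00:00.000Z"
console.log(customDateUsed, mapUsed); // true, false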
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/lambda"
  },
  "name": "@remotion/lambda",
- "version": "4.0.200",
+ "version": "4.0.202",
  "description": "Render Remotion videos on AWS Lambda",
  "main": "dist/index.js",
  "sideEffects": false,
@@ -22,13 +22,13 @@
  "@aws-sdk/s3-request-presigner": "3.583.0",
  "mime-types": "2.1.34",
  "zod": "3.22.3",
- "@remotion/bundler": "4.0.200",
- "@remotion/renderer": "4.0.200",
- "@remotion/cli": "4.0.200",
- "@remotion/studio-server": "4.0.200",
- "@remotion/streaming": "4.0.200",
- "@remotion/serverless": "4.0.200",
- "remotion": "4.0.200"
+ "@remotion/bundler": "4.0.202",
+ "@remotion/cli": "4.0.202",
+ "@remotion/renderer": "4.0.202",
+ "@remotion/studio-server": "4.0.202",
+ "@remotion/streaming": "4.0.202",
+ "remotion": "4.0.202",
+ "@remotion/serverless": "4.0.202"
  },
  "devDependencies": {
  "@types/mime-types": "2.1.1",
@@ -37,11 +37,11 @@
  "pureimage": "0.4.13",
  "vitest": "0.31.1",
  "zip-lib": "^0.7.2",
- "@remotion/bundler": "4.0.200",
- "@remotion/compositor-linux-arm64-gnu": "4.0.200"
+ "@remotion/bundler": "4.0.202",
+ "@remotion/compositor-linux-arm64-gnu": "4.0.202"
  },
  "peerDependencies": {
- "@remotion/bundler": "4.0.200"
+ "@remotion/bundler": "4.0.202"
  },
  "publishConfig": {
  "access": "public"
package/remotionlambda-arm64.zip CHANGED
Binary file
package/dist/functions/provider-implementation.d.ts CHANGED
File without changes
package/dist/functions/provider-implementation.js CHANGED
@@ -1 +0,0 @@
- "use strict";