@remotion/lambda-client 4.0.417 → 4.0.419
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.js +53 -8
- package/dist/esm/index.mjs +53 -8
- package/package.json +3 -3
package/dist/cjs/index.js
CHANGED
@@ -66556,7 +66556,7 @@ var validateDownloadBehavior = (downloadBehavior) => {
     }
   }
 };
-var VERSION = "4.0.417";
+var VERSION = "4.0.419";
 var isColorSupported = () => {
   const env = process.env || {};
   const isForceDisabled = "NO_COLOR" in env;
@@ -67059,6 +67059,17 @@ var calculateChunkTimes = ({
   }
   throw new Error("invalid time for calculate chunk times");
 };
+var checkBucketListing = async ({
+  bucketName,
+  region
+}) => {
+  try {
+    const res = await fetch(`https://${bucketName}.s3.${region}.amazonaws.com/`);
+    if (res.status === 200) {
+      console.warn(`Warning: Your bucket ${bucketName} allows public listing of its contents. See https://remotion.dev/docs/lambda/bucket-security for how to fix this.`);
+    }
+  } catch {}
+};
 var makeBucketName = (region, providerSpecifics) => {
   return `${providerSpecifics.getBucketPrefix()}${region.replace(/-/g, "")}-${providerSpecifics.randomHash()}`;
 };
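The checkBucketListing helper added above performs a plain anonymous fetch of the bucket's virtual-hosted root URL: S3 answers that request with HTTP 200 and an XML object listing only when the bucket permits public listing, and with 403 otherwise, so a 200 triggers the warning. A minimal standalone sketch of the same probe (run as an ES module on Node 18+, which provides a global fetch; the bucket name and region below are placeholders, not values taken from this package):

// Probe whether an S3 bucket allows anonymous listing of its contents.
// Placeholder values -- substitute your own bucket and region.
const bucketName = "remotionlambda-example";
const region = "eu-central-1";

const res = await fetch(`https://${bucketName}.s3.${region}.amazonaws.com/`);
if (res.status === 200) {
  // 200 means S3 returned a ListBucketResult XML document to an unauthenticated caller.
  console.warn(`Bucket ${bucketName} is publicly listable`);
} else {
  // 403 is the expected response for a bucket that blocks public listing;
  // individual objects may still be readable if a policy grants s3:GetObject.
  console.log(`Listing blocked (HTTP ${res.status})`);
}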
@@ -67083,6 +67094,7 @@ var internalGetOrCreateBucket = async (params) => {
       forcePathStyle: params.forcePathStyle,
       requestHandler: params.requestHandler
     });
+    await checkBucketListing({ bucketName: existingBucketName, region });
     return { bucketName: remotionBuckets[0].name, alreadyExisted: true };
   }
   const bucketName = makeBucketName(params.region, params.providerSpecifics);
@@ -67091,7 +67103,8 @@ var internalGetOrCreateBucket = async (params) => {
     region: params.region,
     forcePathStyle: params.forcePathStyle,
     skipPutAcl: params.skipPutAcl,
-    requestHandler: params.requestHandler
+    requestHandler: params.requestHandler,
+    logLevel: params.logLevel
   });
   await params.providerSpecifics.applyLifeCycle({
     enableFolderExpiry: enableFolderExpiry ?? null,
@@ -67173,7 +67186,8 @@ var compressInputProps = async ({
   providerSpecifics,
   forcePathStyle,
   skipPutAcl,
-  requestHandler
+  requestHandler,
+  logLevel
 }) => {
   const hash = providerSpecifics.randomHash();
   if (needsToUpload) {
@@ -67184,7 +67198,8 @@ var compressInputProps = async ({
       providerSpecifics,
       forcePathStyle,
       skipPutAcl,
-      requestHandler
+      requestHandler,
+      logLevel
     })).bucketName;
     await providerSpecifics.writeFile({
       body: stringifiedInputProps,
@@ -68961,6 +68976,33 @@ var createBucket = async ({ region, bucketName, forcePathStyle, requestHandler }
     }
     throw err;
   }
+  let usedBucketPolicy = false;
+  try {
+    const policy = JSON.stringify({
+      Version: "2012-10-17",
+      Statement: [
+        {
+          Sid: "PublicReadGetObject",
+          Effect: "Allow",
+          Principal: "*",
+          Action: "s3:GetObject",
+          Resource: `arn:aws:s3:::${bucketName}/*`
+        }
+      ]
+    });
+    await getS3Client({
+      region,
+      customCredentials: null,
+      forcePathStyle,
+      requestHandler
+    }).send(new import_client_s33.PutBucketPolicyCommand({
+      Bucket: bucketName,
+      Policy: policy
+    }));
+    usedBucketPolicy = true;
+  } catch {
+    console.warn("Could not apply a bucket policy to restrict public access to s3:GetObject only. Falling back to public-read ACL which also allows listing objects. To fix this, add the s3:PutBucketPolicy permission to your IAM user. See https://remotion.dev/docs/lambda/bucket-security");
+  }
   try {
     await getS3Client({
       region,
@@ -68969,7 +69011,7 @@ var createBucket = async ({ region, bucketName, forcePathStyle, requestHandler }
       requestHandler
     }).send(new import_client_s33.PutBucketAclCommand({
       Bucket: bucketName,
-      ACL: "public-read"
+      ACL: usedBucketPolicy ? "private" : "public-read"
     }));
   } catch (err) {
     if (err.message.includes("The bucket does not allow ACLs")) {
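The createBucket change above first tries to attach a bucket policy that grants anonymous principals only s3:GetObject on the bucket's objects; when that succeeds (usedBucketPolicy is true), the subsequent PutBucketAclCommand sets the ACL to "private", so rendered files stay downloadable while object listing stays blocked. Only if PutBucketPolicyCommand fails, for example because the IAM user lacks s3:PutBucketPolicy or the account's Block Public Access settings forbid public bucket policies, does it fall back to the previous public-read ACL and print the warning. A hedged standalone sketch of attaching the same policy with the AWS SDK v3 directly (run as an ES module; the bucket name and region are placeholders, and credentials with s3:PutBucketPolicy are assumed to be configured in the environment):

import { S3Client, PutBucketPolicyCommand } from "@aws-sdk/client-s3";

// Placeholder values -- substitute your own bucket and region.
const bucketName = "remotionlambda-example";
const client = new S3Client({ region: "eu-central-1" });

await client.send(new PutBucketPolicyCommand({
  Bucket: bucketName,
  Policy: JSON.stringify({
    Version: "2012-10-17",
    Statement: [
      {
        Sid: "PublicReadGetObject",
        Effect: "Allow",
        Principal: "*",
        // Grants only object reads; s3:ListBucket is not included,
        // so the bucket root stays non-listable for anonymous callers.
        Action: "s3:GetObject",
        Resource: `arn:aws:s3:::${bucketName}/*`
      }
    ]
  })
}));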
@@ -71047,7 +71089,8 @@ var makeLambdaRenderMediaPayload = async ({
     providerSpecifics: awsImplementation,
     forcePathStyle: forcePathStyle ?? false,
     skipPutAcl: privacy === "no-acl",
-    requestHandler: requestHandler ?? null
+    requestHandler: requestHandler ?? null,
+    logLevel
   });
   return {
     rendererFunctionName,
@@ -71166,7 +71209,8 @@ var makeLambdaRenderStillPayload = async ({
     providerSpecifics: awsImplementation,
     forcePathStyle,
     skipPutAcl: privacy === "no-acl",
-    requestHandler
+    requestHandler,
+    logLevel
   });
   return {
     composition,
@@ -71688,7 +71732,8 @@ var getCompositionsOnLambda = async ({
     providerSpecifics: awsImplementation,
     forcePathStyle: forcePathStyle ?? false,
     skipPutAcl: false,
-    requestHandler
+    requestHandler,
+    logLevel: logLevel ?? "info"
   });
   try {
     const res = await awsImplementation.callFunctionSync({
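The remaining hunks thread a logLevel value through compressInputProps and the render, still, and composition payload builders; getCompositionsOnLambda defaults it to "info" when the caller passes nothing. A small usage sketch, assuming getCompositionsOnLambda is exported from this package as it is from @remotion/lambda, with placeholder region, function name, and serve URL to replace with your own deployment's values:

import { getCompositionsOnLambda } from "@remotion/lambda-client";

// Placeholder deployment values -- replace with your own function and site.
const compositions = await getCompositionsOnLambda({
  region: "eu-central-1",
  functionName: "remotion-render-example",
  serveUrl: "https://example.com/remotion-site",
  inputProps: {},
  // Forwarded to the bucket lookup / input-props upload introduced in this diff.
  logLevel: "verbose",
});

console.log(compositions); // resolves to the list of available compositions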
package/dist/esm/index.mjs
CHANGED
@@ -65899,7 +65899,7 @@ var validateDownloadBehavior = (downloadBehavior) => {
     }
   }
 };
-var VERSION = "4.0.417";
+var VERSION = "4.0.419";
 var isColorSupported = () => {
   const env = process.env || {};
   const isForceDisabled = "NO_COLOR" in env;
@@ -66402,6 +66402,17 @@ var calculateChunkTimes = ({
   }
   throw new Error("invalid time for calculate chunk times");
 };
+var checkBucketListing = async ({
+  bucketName,
+  region
+}) => {
+  try {
+    const res = await fetch(`https://${bucketName}.s3.${region}.amazonaws.com/`);
+    if (res.status === 200) {
+      console.warn(`Warning: Your bucket ${bucketName} allows public listing of its contents. See https://remotion.dev/docs/lambda/bucket-security for how to fix this.`);
+    }
+  } catch {}
+};
 var makeBucketName = (region, providerSpecifics) => {
   return `${providerSpecifics.getBucketPrefix()}${region.replace(/-/g, "")}-${providerSpecifics.randomHash()}`;
 };
@@ -66426,6 +66437,7 @@ var internalGetOrCreateBucket = async (params) => {
       forcePathStyle: params.forcePathStyle,
       requestHandler: params.requestHandler
     });
+    await checkBucketListing({ bucketName: existingBucketName, region });
     return { bucketName: remotionBuckets[0].name, alreadyExisted: true };
   }
   const bucketName = makeBucketName(params.region, params.providerSpecifics);
@@ -66434,7 +66446,8 @@ var internalGetOrCreateBucket = async (params) => {
     region: params.region,
     forcePathStyle: params.forcePathStyle,
     skipPutAcl: params.skipPutAcl,
-    requestHandler: params.requestHandler
+    requestHandler: params.requestHandler,
+    logLevel: params.logLevel
   });
   await params.providerSpecifics.applyLifeCycle({
     enableFolderExpiry: enableFolderExpiry ?? null,
@@ -66516,7 +66529,8 @@ var compressInputProps = async ({
   providerSpecifics,
   forcePathStyle,
   skipPutAcl,
-  requestHandler
+  requestHandler,
+  logLevel
 }) => {
   const hash = providerSpecifics.randomHash();
   if (needsToUpload) {
@@ -66527,7 +66541,8 @@ var compressInputProps = async ({
       providerSpecifics,
       forcePathStyle,
       skipPutAcl,
-      requestHandler
+      requestHandler,
+      logLevel
     })).bucketName;
     await providerSpecifics.writeFile({
       body: stringifiedInputProps,
@@ -68304,6 +68319,33 @@ var createBucket = async ({ region, bucketName, forcePathStyle, requestHandler }
     }
     throw err;
   }
+  let usedBucketPolicy = false;
+  try {
+    const policy = JSON.stringify({
+      Version: "2012-10-17",
+      Statement: [
+        {
+          Sid: "PublicReadGetObject",
+          Effect: "Allow",
+          Principal: "*",
+          Action: "s3:GetObject",
+          Resource: `arn:aws:s3:::${bucketName}/*`
+        }
+      ]
+    });
+    await getS3Client({
+      region,
+      customCredentials: null,
+      forcePathStyle,
+      requestHandler
+    }).send(new import_client_s33.PutBucketPolicyCommand({
+      Bucket: bucketName,
+      Policy: policy
+    }));
+    usedBucketPolicy = true;
+  } catch {
+    console.warn("Could not apply a bucket policy to restrict public access to s3:GetObject only. Falling back to public-read ACL which also allows listing objects. To fix this, add the s3:PutBucketPolicy permission to your IAM user. See https://remotion.dev/docs/lambda/bucket-security");
+  }
   try {
     await getS3Client({
       region,
@@ -68312,7 +68354,7 @@ var createBucket = async ({ region, bucketName, forcePathStyle, requestHandler }
       requestHandler
     }).send(new import_client_s33.PutBucketAclCommand({
       Bucket: bucketName,
-      ACL: "public-read"
+      ACL: usedBucketPolicy ? "private" : "public-read"
     }));
   } catch (err) {
     if (err.message.includes("The bucket does not allow ACLs")) {
@@ -70874,7 +70916,8 @@ var makeLambdaRenderMediaPayload = async ({
     providerSpecifics: awsImplementation,
     forcePathStyle: forcePathStyle ?? false,
     skipPutAcl: privacy === "no-acl",
-    requestHandler: requestHandler ?? null
+    requestHandler: requestHandler ?? null,
+    logLevel
   });
   return {
     rendererFunctionName,
@@ -70993,7 +71036,8 @@ var makeLambdaRenderStillPayload = async ({
     providerSpecifics: awsImplementation,
     forcePathStyle,
     skipPutAcl: privacy === "no-acl",
-    requestHandler
+    requestHandler,
+    logLevel
   });
   return {
     composition,
@@ -71515,7 +71559,8 @@ var getCompositionsOnLambda = async ({
     providerSpecifics: awsImplementation,
     forcePathStyle: forcePathStyle ?? false,
     skipPutAcl: false,
-    requestHandler
+    requestHandler,
+    logLevel: logLevel ?? "info"
   });
   try {
     const res = await awsImplementation.callFunctionSync({
package/package.json
CHANGED
@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/lambda-client"
   },
   "name": "@remotion/lambda-client",
-  "version": "4.0.417",
+  "version": "4.0.419",
   "main": "dist/cjs/index.js",
   "sideEffects": false,
   "scripts": {
@@ -26,10 +26,10 @@
     "@aws-sdk/lib-storage": "3.936.0",
     "mime-types": "2.1.34",
     "@aws-sdk/credential-provider-ini": "3.936.0",
-    "@remotion/serverless-client": "4.0.417",
+    "@remotion/serverless-client": "4.0.419",
     "@types/express": "^5.0.0",
     "express": "4.21.0",
-    "@remotion/eslint-config-internal": "4.0.417",
+    "@remotion/eslint-config-internal": "4.0.419",
     "eslint": "9.19.0",
     "next": "16.1.5",
     "@types/mime-types": "2.1.1"