@remotion/serverless-client 4.0.416 → 4.0.418
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/check-bucket-listing.d.ts +4 -0
- package/dist/check-bucket-listing.js +16 -0
- package/dist/compress-props.d.ts +3 -1
- package/dist/compress-props.js +2 -1
- package/dist/esm/index.mjs +21 -4
- package/dist/get-or-create-bucket.d.ts +3 -0
- package/dist/get-or-create-bucket.js +3 -0
- package/dist/provider-implementation.d.ts +1 -0
- package/package.json +5 -5
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.checkBucketListing = void 0;
|
|
4
|
+
const checkBucketListing = async ({ bucketName, region, }) => {
|
|
5
|
+
try {
|
|
6
|
+
const res = await fetch(`https://${bucketName}.s3.${region}.amazonaws.com/`);
|
|
7
|
+
if (res.status === 200) {
|
|
8
|
+
// eslint-disable-next-line no-console
|
|
9
|
+
console.warn(`Warning: Your bucket ${bucketName} allows public listing of its contents. See https://remotion.dev/docs/lambda/bucket-security for how to fix this.`);
|
|
10
|
+
}
|
|
11
|
+
}
|
|
12
|
+
catch (_a) {
|
|
13
|
+
// Ignore - best effort check, may fail for non-AWS providers
|
|
14
|
+
}
|
|
15
|
+
};
|
|
16
|
+
exports.checkBucketListing = checkBucketListing;
|
package/dist/compress-props.d.ts
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import type { LogLevel } from 'remotion';
|
|
1
2
|
import type { SerializedInputProps } from './constants';
|
|
2
3
|
import type { ProviderSpecifics } from './provider-implementation';
|
|
3
4
|
import type { CloudProvider } from './types';
|
|
@@ -8,7 +9,7 @@ export declare const getNeedsToUpload: <Provider extends CloudProvider>({ type,
|
|
|
8
9
|
sizes: number[];
|
|
9
10
|
providerSpecifics: ProviderSpecifics<Provider>;
|
|
10
11
|
}) => boolean;
|
|
11
|
-
export declare const compressInputProps: <Provider extends CloudProvider>({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, requestHandler, }: {
|
|
12
|
+
export declare const compressInputProps: <Provider extends CloudProvider>({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, requestHandler, logLevel, }: {
|
|
12
13
|
stringifiedInputProps: string;
|
|
13
14
|
region: Provider["region"];
|
|
14
15
|
userSpecifiedBucketName: string | null;
|
|
@@ -18,6 +19,7 @@ export declare const compressInputProps: <Provider extends CloudProvider>({ stri
|
|
|
18
19
|
forcePathStyle: boolean;
|
|
19
20
|
skipPutAcl: boolean;
|
|
20
21
|
requestHandler: Provider["requestHandler"] | undefined;
|
|
22
|
+
logLevel: LogLevel;
|
|
21
23
|
}) => Promise<SerializedInputProps>;
|
|
22
24
|
export declare const decompressInputProps: <Provider extends CloudProvider>({ serialized, region, bucketName, expectedBucketOwner, propsType, providerSpecifics, forcePathStyle, requestHandler, }: {
|
|
23
25
|
serialized: SerializedInputProps;
|
package/dist/compress-props.js
CHANGED
|
@@ -40,7 +40,7 @@ const getNeedsToUpload = ({ type, sizes, providerSpecifics, }) => {
|
|
|
40
40
|
return false;
|
|
41
41
|
};
|
|
42
42
|
exports.getNeedsToUpload = getNeedsToUpload;
|
|
43
|
-
const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, requestHandler, }) => {
|
|
43
|
+
const compressInputProps = async ({ stringifiedInputProps, region, userSpecifiedBucketName, propsType, needsToUpload, providerSpecifics, forcePathStyle, skipPutAcl, requestHandler, logLevel, }) => {
|
|
44
44
|
const hash = providerSpecifics.randomHash();
|
|
45
45
|
if (needsToUpload) {
|
|
46
46
|
const bucketName = userSpecifiedBucketName !== null && userSpecifiedBucketName !== void 0 ? userSpecifiedBucketName : (await (0, get_or_create_bucket_1.internalGetOrCreateBucket)({
|
|
@@ -51,6 +51,7 @@ const compressInputProps = async ({ stringifiedInputProps, region, userSpecified
|
|
|
51
51
|
forcePathStyle,
|
|
52
52
|
skipPutAcl,
|
|
53
53
|
requestHandler,
|
|
54
|
+
logLevel,
|
|
54
55
|
})).bucketName;
|
|
55
56
|
await providerSpecifics.writeFile({
|
|
56
57
|
body: stringifiedInputProps,
|
package/dist/esm/index.mjs
CHANGED
|
@@ -843,7 +843,7 @@ var validateFramesPerFunction = ({
|
|
|
843
843
|
import * as tty from "tty";
|
|
844
844
|
|
|
845
845
|
// ../core/dist/esm/version.mjs
|
|
846
|
-
var VERSION = "4.0.416";
|
|
846
|
+
var VERSION = "4.0.418";
|
|
847
847
|
|
|
848
848
|
// ../renderer/dist/esm/error-handling.mjs
|
|
849
849
|
var isColorSupported = () => {
|
|
@@ -1380,6 +1380,19 @@ var calculateChunkTimes = ({
|
|
|
1380
1380
|
}
|
|
1381
1381
|
throw new Error("invalid time for calculate chunk times");
|
|
1382
1382
|
};
|
|
1383
|
+
// src/check-bucket-listing.ts
|
|
1384
|
+
var checkBucketListing = async ({
|
|
1385
|
+
bucketName,
|
|
1386
|
+
region
|
|
1387
|
+
}) => {
|
|
1388
|
+
try {
|
|
1389
|
+
const res = await fetch(`https://${bucketName}.s3.${region}.amazonaws.com/`);
|
|
1390
|
+
if (res.status === 200) {
|
|
1391
|
+
console.warn(`Warning: Your bucket ${bucketName} allows public listing of its contents. See https://remotion.dev/docs/lambda/bucket-security for how to fix this.`);
|
|
1392
|
+
}
|
|
1393
|
+
} catch {}
|
|
1394
|
+
};
|
|
1395
|
+
|
|
1383
1396
|
// src/make-bucket-name.ts
|
|
1384
1397
|
var makeBucketName = (region, providerSpecifics) => {
|
|
1385
1398
|
return `${providerSpecifics.getBucketPrefix()}${region.replace(/-/g, "")}-${providerSpecifics.randomHash()}`;
|
|
@@ -1407,6 +1420,7 @@ var internalGetOrCreateBucket = async (params) => {
|
|
|
1407
1420
|
forcePathStyle: params.forcePathStyle,
|
|
1408
1421
|
requestHandler: params.requestHandler
|
|
1409
1422
|
});
|
|
1423
|
+
await checkBucketListing({ bucketName: existingBucketName, region });
|
|
1410
1424
|
return { bucketName: remotionBuckets[0].name, alreadyExisted: true };
|
|
1411
1425
|
}
|
|
1412
1426
|
const bucketName = makeBucketName(params.region, params.providerSpecifics);
|
|
@@ -1415,7 +1429,8 @@ var internalGetOrCreateBucket = async (params) => {
|
|
|
1415
1429
|
region: params.region,
|
|
1416
1430
|
forcePathStyle: params.forcePathStyle,
|
|
1417
1431
|
skipPutAcl: params.skipPutAcl,
|
|
1418
|
-
requestHandler: params.requestHandler
|
|
1432
|
+
requestHandler: params.requestHandler,
|
|
1433
|
+
logLevel: params.logLevel
|
|
1419
1434
|
});
|
|
1420
1435
|
await params.providerSpecifics.applyLifeCycle({
|
|
1421
1436
|
enableFolderExpiry: enableFolderExpiry ?? null,
|
|
@@ -1505,7 +1520,8 @@ var compressInputProps = async ({
|
|
|
1505
1520
|
providerSpecifics,
|
|
1506
1521
|
forcePathStyle,
|
|
1507
1522
|
skipPutAcl,
|
|
1508
|
-
requestHandler
|
|
1523
|
+
requestHandler,
|
|
1524
|
+
logLevel
|
|
1509
1525
|
}) => {
|
|
1510
1526
|
const hash = providerSpecifics.randomHash();
|
|
1511
1527
|
if (needsToUpload) {
|
|
@@ -1516,7 +1532,8 @@ var compressInputProps = async ({
|
|
|
1516
1532
|
providerSpecifics,
|
|
1517
1533
|
forcePathStyle,
|
|
1518
1534
|
skipPutAcl,
|
|
1519
|
-
requestHandler
|
|
1535
|
+
requestHandler,
|
|
1536
|
+
logLevel
|
|
1520
1537
|
})).bucketName;
|
|
1521
1538
|
await providerSpecifics.writeFile({
|
|
1522
1539
|
body: stringifiedInputProps,
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import type { LogLevel } from 'remotion';
|
|
1
2
|
import type { CustomCredentials } from './constants';
|
|
2
3
|
import type { ProviderSpecifics } from './provider-implementation';
|
|
3
4
|
import type { CloudProvider } from './types';
|
|
@@ -8,6 +9,7 @@ type GetOrCreateBucketInputInner<Provider extends CloudProvider> = {
|
|
|
8
9
|
providerSpecifics: ProviderSpecifics<Provider>;
|
|
9
10
|
forcePathStyle: boolean;
|
|
10
11
|
skipPutAcl: boolean;
|
|
12
|
+
logLevel: LogLevel;
|
|
11
13
|
requestHandler: Provider['requestHandler'] | null;
|
|
12
14
|
};
|
|
13
15
|
export type GetOrCreateBucketInput<Provider extends CloudProvider> = {
|
|
@@ -16,6 +18,7 @@ export type GetOrCreateBucketInput<Provider extends CloudProvider> = {
|
|
|
16
18
|
customCredentials?: CustomCredentials<Provider>;
|
|
17
19
|
forcePathStyle?: boolean;
|
|
18
20
|
requestHandler?: Provider['requestHandler'];
|
|
21
|
+
logLevel?: LogLevel;
|
|
19
22
|
};
|
|
20
23
|
export type GetOrCreateBucketOutput = {
|
|
21
24
|
bucketName: string;
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.internalGetOrCreateBucket = void 0;
|
|
4
|
+
const check_bucket_listing_1 = require("./check-bucket-listing");
|
|
4
5
|
const make_bucket_name_1 = require("./make-bucket-name");
|
|
5
6
|
const internalGetOrCreateBucket = async (params) => {
|
|
6
7
|
const remotionBuckets = await params.providerSpecifics.getBuckets({
|
|
@@ -24,6 +25,7 @@ const internalGetOrCreateBucket = async (params) => {
|
|
|
24
25
|
forcePathStyle: params.forcePathStyle,
|
|
25
26
|
requestHandler: params.requestHandler,
|
|
26
27
|
});
|
|
28
|
+
await (0, check_bucket_listing_1.checkBucketListing)({ bucketName: existingBucketName, region });
|
|
27
29
|
return { bucketName: remotionBuckets[0].name, alreadyExisted: true };
|
|
28
30
|
}
|
|
29
31
|
const bucketName = (0, make_bucket_name_1.makeBucketName)(params.region, params.providerSpecifics);
|
|
@@ -33,6 +35,7 @@ const internalGetOrCreateBucket = async (params) => {
|
|
|
33
35
|
forcePathStyle: params.forcePathStyle,
|
|
34
36
|
skipPutAcl: params.skipPutAcl,
|
|
35
37
|
requestHandler: params.requestHandler,
|
|
38
|
+
logLevel: params.logLevel,
|
|
36
39
|
});
|
|
37
40
|
// apply to newly created bucket
|
|
38
41
|
await params.providerSpecifics.applyLifeCycle({
|
|
@@ -52,6 +52,7 @@ type CreateBucket<Provider extends CloudProvider> = (params: {
|
|
|
52
52
|
forcePathStyle: boolean;
|
|
53
53
|
skipPutAcl: boolean;
|
|
54
54
|
requestHandler: Provider['requestHandler'] | null;
|
|
55
|
+
logLevel: LogLevel;
|
|
55
56
|
}) => Promise<void>;
|
|
56
57
|
type ApplyLifeCycle<Provider extends CloudProvider> = (params: {
|
|
57
58
|
enableFolderExpiry: boolean | null;
|
package/package.json
CHANGED
|
@@ -3,7 +3,7 @@
|
|
|
3
3
|
"url": "https://github.com/remotion-dev/remotion/tree/main/packages/serverless-client"
|
|
4
4
|
},
|
|
5
5
|
"name": "@remotion/serverless-client",
|
|
6
|
-
"version": "4.0.416",
|
|
6
|
+
"version": "4.0.418",
|
|
7
7
|
"main": "dist",
|
|
8
8
|
"sideEffects": false,
|
|
9
9
|
"scripts": {
|
|
@@ -23,10 +23,10 @@
|
|
|
23
23
|
},
|
|
24
24
|
"dependencies": {},
|
|
25
25
|
"devDependencies": {
|
|
26
|
-
"remotion": "4.0.416",
|
|
27
|
-
"@remotion/streaming": "4.0.416",
|
|
28
|
-
"@remotion/renderer": "4.0.416",
|
|
29
|
-
"@remotion/eslint-config-internal": "4.0.416",
|
|
26
|
+
"remotion": "4.0.418",
|
|
27
|
+
"@remotion/streaming": "4.0.418",
|
|
28
|
+
"@remotion/renderer": "4.0.418",
|
|
29
|
+
"@remotion/eslint-config-internal": "4.0.418",
|
|
30
30
|
"eslint": "9.19.0"
|
|
31
31
|
},
|
|
32
32
|
"exports": {
|