@strapi/provider-upload-aws-s3 4.15.1 → 4.15.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +25 -13
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +83 -77
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +83 -75
- package/dist/index.mjs.map +1 -1
- package/dist/utils.d.ts +3 -0
- package/dist/utils.d.ts.map +1 -1
- package/package.json +9 -6
package/dist/index.d.ts
CHANGED
@@ -1,8 +1,9 @@
 /// <reference types="node" />
 /// <reference types="node" />
 import type { ReadStream } from 'node:fs';
-import
-
+import { DeleteObjectCommandOutput, CompleteMultipartUploadCommandOutput, AbortMultipartUploadCommandOutput, S3ClientConfig, ObjectCannedACL } from '@aws-sdk/client-s3';
+import type { AwsCredentialIdentity } from '@aws-sdk/types';
+export interface File {
 name: string;
 alternativeText?: string;
 caption?: string;
@@ -21,26 +22,37 @@ interface File {
 stream?: ReadStream;
 buffer?: Buffer;
 }
-
+export type UploadCommandOutput = (CompleteMultipartUploadCommandOutput | AbortMultipartUploadCommandOutput) & {
+Location: string;
+};
+export interface AWSParams {
+Bucket: string;
+ACL?: ObjectCannedACL;
+signedUrlExpires?: number;
+}
+export interface DefaultOptions extends S3ClientConfig {
+accessKeyId?: AwsCredentialIdentity['accessKeyId'];
+secretAccessKey?: AwsCredentialIdentity['secretAccessKey'];
+credentials?: AwsCredentialIdentity;
+params?: AWSParams;
+[k: string]: any;
+}
+export type InitOptions = (DefaultOptions | {
+s3Options: DefaultOptions;
+}) & {
 baseUrl?: string;
 rootPath?: string;
-
-
-Bucket: string;
-ACL?: string;
-signedUrlExpires?: string;
-};
-};
-}
+[k: string]: any;
+};
 declare const _default: {
 init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions): {
 isPrivate(): boolean;
-getSignedUrl(file: File): Promise<{
+getSignedUrl(file: File, customParams: any): Promise<{
 url: string;
 }>;
 uploadStream(file: File, customParams?: {}): Promise<void>;
 upload(file: File, customParams?: {}): Promise<void>;
-delete(file: File, customParams?: {}): Promise<
+delete(file: File, customParams?: {}): Promise<DeleteObjectCommandOutput>;
 };
 };
 export default _default;
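
The new declarations replace the aws-sdk v2 option types with AWS SDK v3 ones: ACL is now typed as ObjectCannedACL, signedUrlExpires is a number of seconds, and credentials can be passed as an AwsCredentialIdentity inside s3Options. A minimal sketch of a Strapi upload provider configuration matching the new InitOptions shape; the bucket, region, environment variable names and baseUrl below are placeholders, not values taken from this diff:

// Hypothetical config/plugins.ts entry; all values are placeholders.
export default () => ({
  upload: {
    config: {
      provider: 'aws-s3',
      providerOptions: {
        baseUrl: 'https://cdn.example.com', // optional CDN base URL
        rootPath: 'uploads',                // optional key prefix
        s3Options: {
          region: 'us-east-1',
          credentials: {
            accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
            secretAccessKey: process.env.AWS_ACCESS_SECRET ?? '',
          },
          params: {
            Bucket: 'my-bucket',
            ACL: 'public-read',        // ObjectCannedACL value; defaults to public-read
            signedUrlExpires: 15 * 60, // now a number of seconds, previously a string
          },
        },
      },
    },
  },
});
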
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAE1C,OAAO,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAE1C,OAAO,EAIL,yBAAyB,EAEzB,oCAAoC,EACpC,iCAAiC,EACjC,cAAc,EACd,eAAe,EAChB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AAK5D,MAAM,WAAW,IAAI;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAClC,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC5C,MAAM,CAAC,EAAE,UAAU,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,MAAM,mBAAmB,GAAG,CAC9B,oCAAoC,GACpC,iCAAiC,CACpC,GAAG;IACF,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,WAAW,SAAS;IACxB,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,CAAC,EAAE,eAAe,CAAC;IACtB,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,MAAM,WAAW,cAAe,SAAQ,cAAc;IAEpD,WAAW,CAAC,EAAE,qBAAqB,CAAC,aAAa,CAAC,CAAC;IACnD,eAAe,CAAC,EAAE,qBAAqB,CAAC,iBAAiB,CAAC,CAAC;IAE3D,WAAW,CAAC,EAAE,qBAAqB,CAAC;IACpC,MAAM,CAAC,EAAE,SAAS,CAAC;IACnB,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAClB;AAED,MAAM,MAAM,WAAW,GAAG,CAAC,cAAc,GAAG;IAAE,SAAS,EAAE,cAAc,CAAA;CAAE,CAAC,GAAG;IAC3E,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAClB,CAAC;;+DA0B2D,WAAW;;2BAwCzC,IAAI,gBAAgB,GAAG;iBAAkB,MAAM;;2BAqBrD,IAAI;qBAGV,IAAI;qBAGJ,IAAI,sBAAsB,QAAQ,yBAAyB,CAAC;;;AApE/E,wBA8EE"}
package/dist/index.js
CHANGED
@@ -1,8 +1,8 @@
 "use strict";
 const fp = require("lodash/fp");
-const
-const
-const
+const clientS3 = require("@aws-sdk/client-s3");
+const s3RequestPresigner = require("@aws-sdk/s3-request-presigner");
+const libStorage = require("@aws-sdk/lib-storage");
 const ENDPOINT_PATTERN = /^(.+\.)?s3[.-]([a-z0-9-]+)\./;
 function isUrlFromBucket(fileUrl, bucketName, baseUrl = "") {
 const url = new URL(fileUrl);
@@ -47,81 +47,97 @@ function getBucketFromAwsUrl(fileUrl) {
 }
 return { bucket: prefix.substring(0, prefix.length - 1) };
 }
-
-
+const extractCredentials = (options) => {
+if (options.accessKeyId && options.secretAccessKey) {
+return {
+accessKeyId: options.accessKeyId,
+secretAccessKey: options.secretAccessKey
+};
+}
+if (options.s3Options?.accessKeyId && options.s3Options.secretAccessKey) {
+process.emitWarning(
+"Credentials passed directly to s3Options is deprecated and will be removed in a future release. Please wrap them inside a credentials object."
+);
+return {
+accessKeyId: options.s3Options.accessKeyId,
+secretAccessKey: options.s3Options.secretAccessKey
+};
+}
+if (options.s3Options?.credentials) {
+return {
+accessKeyId: options.s3Options.credentials.accessKeyId,
+secretAccessKey: options.s3Options.credentials.secretAccessKey
+};
+}
+throw new Error("Couldn't find AWS credentials.");
+};
+const assertUrlProtocol = (url) => {
 return /^\w*:\/\//.test(url);
-}
+};
+const getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }) => {
+if (Object.keys(legacyS3Options).length > 0) {
+process.emitWarning(
+"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
+);
+}
+const config = {
+...s3Options,
+...legacyS3Options,
+credentials: extractCredentials({ s3Options, ...legacyS3Options })
+};
+config.params.ACL = fp.getOr(clientS3.ObjectCannedACL.public_read, ["params", "ACL"], config);
+return config;
+};
 const index = {
 init({ baseUrl, rootPath, s3Options, ...legacyS3Options }) {
-
-
-"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
-);
-}
-const config = { ...s3Options, ...legacyS3Options };
-const S3 = new AWS__default.default.S3({
-apiVersion: "2006-03-01",
-...config
-});
+const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });
+const s3Client = new clientS3.S3Client(config);
 const filePrefix = rootPath ? `${rootPath.replace(/\/+$/, "")}/` : "";
 const getFileKey = (file) => {
 const path = file.path ? `${file.path}/` : "";
 return `${filePrefix}${path}${file.hash}${file.ext}`;
 };
-const
-const upload = (file, customParams = {}) => new Promise((resolve, reject) => {
+const upload = async (file, customParams = {}) => {
 const fileKey = getFileKey(file);
-
-
-
-
-
-
-
-
-
-ContentType: file.mime,
-...customParams
-};
-const onUploaded = (err, data) => {
-if (err) {
-return reject(err);
-}
-if (baseUrl) {
-file.url = `${baseUrl}/${fileKey}`;
-} else {
-file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;
+const uploadObj = new libStorage.Upload({
+client: s3Client,
+params: {
+Bucket: config.params.Bucket,
+Key: fileKey,
+Body: file.stream || Buffer.from(file.buffer, "binary"),
+ACL: config.params.ACL,
+ContentType: file.mime,
+...customParams
 }
-
-
-
-
+});
+const upload2 = await uploadObj.done();
+if (assertUrlProtocol(upload2.Location)) {
+file.url = baseUrl ? `${baseUrl}/${fileKey}` : upload2.Location;
+} else {
+file.url = `https://${upload2.Location}`;
+}
+};
 return {
 isPrivate() {
-return ACL === "private";
+return config.params.ACL === "private";
 },
-async getSignedUrl(file) {
+async getSignedUrl(file, customParams) {
 if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {
 return { url: file.url };
 }
-const
-
-
-
-
-
-
-
-
-
-
-
-
-}
-resolve({ url });
-}
-);
-});
+const fileKey = getFileKey(file);
+const url = await s3RequestPresigner.getSignedUrl(
+s3Client,
+new clientS3.GetObjectCommand({
+Bucket: config.params.Bucket,
+Key: fileKey,
+...customParams
+}),
+{
+expiresIn: fp.getOr(15 * 60, ["params", "signedUrlExpires"], config)
+}
+);
+return { url };
 },
 uploadStream(file, customParams = {}) {
 return upload(file, customParams);
@@ -130,22 +146,12 @@ const index = {
 return upload(file, customParams);
 },
 delete(file, customParams = {}) {
-
-
-
-
-Key: fileKey,
-Bucket: config.params.Bucket,
-...customParams
-},
-(err) => {
-if (err) {
-return reject(err);
-}
-resolve();
-}
-);
+const command = new clientS3.DeleteObjectCommand({
+Bucket: config.params.Bucket,
+Key: getFileKey(file),
+...customParams
 });
+return s3Client.send(command);
 }
 };
 }
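
The rewritten CommonJS build drops the callback-based aws-sdk v2 client in favour of @aws-sdk/client-s3 plus @aws-sdk/lib-storage, whose Upload class handles both single-part and multipart uploads and resolves with the object's Location. A standalone sketch of that upload pattern; the bucket, key, body and content type are placeholders, not values from this diff:

// Sketch of the SDK v3 upload pattern used above; placeholder bucket/key/body.
import { S3Client, ObjectCannedACL } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';

async function putObject(client: S3Client, body: Buffer): Promise<string | undefined> {
  const upload = new Upload({
    client,
    params: {
      Bucket: 'my-bucket',
      Key: 'uploads/example.png',
      Body: body,
      ACL: ObjectCannedACL.public_read,
      ContentType: 'image/png',
    },
  });
  const result = await upload.done();
  // Some S3-compatible providers return Location without a protocol, which is
  // why the provider code above prefixes "https://" when the scheme is missing.
  return 'Location' in result ? result.Location : undefined;
}
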
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["const ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport AWS from 'aws-sdk';\nimport { isUrlFromBucket } from './utils';\n\ninterface File {\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\n// TODO V5: Migrate to aws-sdk v3\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nrequire('aws-sdk/lib/maintenance_mode_message').suppress = true;\n\nfunction hasUrlProtocol(url: string) {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n}\n\ninterface InitOptions extends Partial<AWS.S3.ClientConfiguration> {\n baseUrl?: string;\n rootPath?: string;\n s3Options: AWS.S3.ClientConfiguration & {\n 
params: {\n Bucket: string; // making it required\n ACL?: string;\n signedUrlExpires?: string;\n };\n };\n}\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n\n const config = { ...s3Options, ...legacyS3Options };\n\n const S3 = new AWS.S3({\n apiVersion: '2006-03-01',\n ...config,\n });\n\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? `${file.path}/` : '';\n\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const ACL = getOr('public-read', ['params', 'ACL'], config);\n\n const upload = (file: File, customParams = {}): Promise<void> =>\n new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n if (!file.stream && !file.buffer) {\n reject(new Error('Missing file stream or buffer'));\n return;\n }\n\n const params = {\n Key: fileKey,\n Bucket: config.params.Bucket,\n Body: file.stream || file.buffer,\n ACL,\n ContentType: file.mime,\n ...customParams,\n };\n\n const onUploaded = (err: Error, data: AWS.S3.ManagedUpload.SendData) => {\n if (err) {\n return reject(err);\n }\n\n // set the bucket file url\n if (baseUrl) {\n // Construct the url with the baseUrl\n file.url = `${baseUrl}/${fileKey}`;\n } else {\n // Add the protocol if it is missing\n // Some providers like DigitalOcean Spaces return the url without the protocol\n file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;\n }\n resolve();\n };\n\n S3.upload(params, onUploaded);\n });\n\n return {\n isPrivate() {\n return ACL === 'private';\n },\n async getSignedUrl(file: File): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n\n const signedUrlExpires: string = getOr(15 * 60, ['params', 'signedUrlExpires'], config); // 15 minutes\n\n return new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n S3.getSignedUrl(\n 'getObject',\n {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Expires: parseInt(signedUrlExpires, 10),\n },\n (err, url) => {\n if (err) {\n return reject(err);\n }\n resolve({ url });\n }\n );\n });\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<void> {\n return new Promise((resolve, reject) => {\n // delete file on S3 bucket\n const fileKey = getFileKey(file);\n S3.deleteObject(\n {\n Key: fileKey,\n Bucket: config.params.Bucket,\n ...customParams,\n },\n (err) => {\n if (err) {\n return reject(err);\n }\n\n resolve();\n }\n );\n });\n },\n };\n 
},\n};\n"],"names":["index","AWS","getOr"],"mappings":";;;;;AAAA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EACT;AAEA,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AAMA,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IACpD;AACA,WAAO,EAAE,OAAO;AAAA,EAClB;AAEI,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAC7F;AAEM,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ;IACnB;AAEA,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC;IAC3C;AAGA,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;IAClD;AAGA,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;EAClD;AAGO,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC;AACxD;AC1DA,QAAQ,sCAAsC,EAAE,WAAW;AAE3D,SAAS,eAAe,KAAa;AAE5B,SAAA,YAAY,KAAK,GAAG;AAC7B;AAcA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AACtE,QAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,cAAA;AAAA,QACN;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,SAAS,EAAE,GAAG,WAAW,GAAG,gBAAgB;AAE5C,UAAA,KAAK,IAAIC,aAAA,QAAI,GAAG;AAAA,MACpB,YAAY;AAAA,MACZ,GAAG;AAAA,IAAA,CACJ;AAEK,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AAEpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IAAA;AAGpD,UAAM,MAAMC,GAAAA,MAAM,eAAe,CAAC,UAAU,KAAK,GAAG,MAAM;AAEpD,UAAA,SAAS,CAAC,MAAY,eAAe,OACzC,IAAI,QAAQ,CAAC,SAAS,WAAW;AACzB,YAAA,UAAU,WAAW,IAAI;AAE/B,UAAI,CAAC,KAAK,UAAU,CAAC,KAAK,QAAQ;AACzB,eAAA,IAAI,MAAM,+BAA+B,CAAC;AACjD;AAAA,MACF;AAEA,YAAM,SAAS;AAAA,QACb,KAAK;AAAA,QACL,QAAQ,OAAO,OAAO;AAAA,QACtB,MAAM,KAAK,UAAU,KAAK;AAAA,QAC1B;AAAA,QACA,aAAa,KAAK;AAAA,QAClB,GAAG;AAAA,MAAA;AAGC,YAAA,aAAa,CAAC,KAAY,SAAwC;AACtE,YAAI,KAAK;AACP,iBAAO,OAAO,GAAG;AAAA,QACnB;AAGA,YAAI,SAAS;AAEX,eAAK,MAAM,GAAG,OAAO,IAAI,OAAO;AAAA,QAAA,OAC3B;AAGA,eAAA,MAAM,eAAe,KAAK,QAAQ,IAAI,KAAK,WAAW,WAAW,KAAK,QAAQ;AAAA,QACrF;AACQ;MAAA;AAGP,SAAA,OAAO,QAAQ,UAAU;AAAA,IAAA,CAC7B;AAEI,WAAA;AAAA,MACL,YAAY;AACV,eAAO,QAAQ;AAAA,MACjB;AAAA,MACA,MAAM,aAAa,MAAsC;AAEnD,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK;QACrB;AAEM,cAAA,mBAA2BA,SAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAEtF,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChC,gBAAA,UAAU,WAAW,IAAI;AAE5B,aAAA;AAAA,YACD;AAAA,YACA;AAAA,cACE,QAAQ,OAAO,OAAO;AAAA,cACtB,KAAK;AAAA,cACL,SAAS,SAAS,kBAAkB,EAAE;AAAA,YACxC;AAAA,YACA,CAAC,KAAK,QAAQ;AACZ,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AACQ,sBAAA,EAAE,KAAK;AAAA,YACjB;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAmB;AACnD,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAEhC,gBAAA,UAAU,WAAW,IAAI;AAC5B,aAAA;AAAA,YACD;AAAA,cACE,KAAK;AAAA,cACL,QAAQ,OAAO,OAAO;AAAA,cACtB,GAAG;AAAA,YACL;AAAA,YACA,CAAC,QAAQ;AACP,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AAEQ;YACV;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,IAAA;AAAA,EAEJ;AACF;;"}
+
{"version":3,"file":"index.js","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["import type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport type { InitOptions } from '.';\n\nconst ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n\n// TODO Remove this in V5 since we will only support the new config structure\nexport const extractCredentials = (options: InitOptions): AwsCredentialIdentity => {\n // legacy\n if (options.accessKeyId && options.secretAccessKey) {\n return {\n accessKeyId: options.accessKeyId,\n secretAccessKey: options.secretAccessKey,\n };\n }\n // Legacy\n if (options.s3Options?.accessKeyId && options.s3Options.secretAccessKey) {\n process.emitWarning(\n 'Credentials passed directly to s3Options is deprecated and will be removed in a future release. 
Please wrap them inside a credentials object.'\n );\n return {\n accessKeyId: options.s3Options.accessKeyId,\n secretAccessKey: options.s3Options.secretAccessKey,\n };\n }\n // V5\n if (options.s3Options?.credentials) {\n return {\n accessKeyId: options.s3Options.credentials.accessKeyId,\n secretAccessKey: options.s3Options.credentials.secretAccessKey,\n };\n }\n\n throw new Error(\"Couldn't find AWS credentials.\");\n};\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport {\n S3Client,\n GetObjectCommand,\n DeleteObjectCommand,\n DeleteObjectCommandOutput,\n PutObjectCommandInput,\n CompleteMultipartUploadCommandOutput,\n AbortMultipartUploadCommandOutput,\n S3ClientConfig,\n ObjectCannedACL,\n} from '@aws-sdk/client-s3';\nimport type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport { getSignedUrl } from '@aws-sdk/s3-request-presigner';\nimport { Upload } from '@aws-sdk/lib-storage';\nimport { extractCredentials, isUrlFromBucket } from './utils';\n\nexport interface File {\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\nexport type UploadCommandOutput = (\n | CompleteMultipartUploadCommandOutput\n | AbortMultipartUploadCommandOutput\n) & {\n Location: string;\n};\n\nexport interface AWSParams {\n Bucket: string; // making it required\n ACL?: ObjectCannedACL;\n signedUrlExpires?: number;\n}\n\nexport interface DefaultOptions extends S3ClientConfig {\n // TODO Remove this in V5\n accessKeyId?: AwsCredentialIdentity['accessKeyId'];\n secretAccessKey?: AwsCredentialIdentity['secretAccessKey'];\n // Keep this for V5\n credentials?: AwsCredentialIdentity;\n params?: AWSParams;\n [k: string]: any;\n}\n\nexport type InitOptions = (DefaultOptions | { s3Options: DefaultOptions }) & {\n baseUrl?: string;\n rootPath?: string;\n [k: string]: any;\n};\n\nconst assertUrlProtocol = (url: string) => {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n};\n\nconst getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) => {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n\n const config = {\n ...s3Options,\n ...legacyS3Options,\n credentials: extractCredentials({ s3Options, ...legacyS3Options }),\n };\n\n config.params.ACL = getOr(ObjectCannedACL.public_read, ['params', 'ACL'], config);\n\n return config;\n};\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n // TODO V5 change config structure to avoid having to do this\n const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });\n const s3Client = new S3Client(config);\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? 
`${file.path}/` : '';\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const upload = async (file: File, customParams: Partial<PutObjectCommandInput> = {}) => {\n const fileKey = getFileKey(file);\n const uploadObj = new Upload({\n client: s3Client,\n params: {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Body: file.stream || Buffer.from(file.buffer as any, 'binary'),\n ACL: config.params.ACL,\n ContentType: file.mime,\n ...customParams,\n },\n });\n\n const upload = (await uploadObj.done()) as UploadCommandOutput;\n\n if (assertUrlProtocol(upload.Location)) {\n file.url = baseUrl ? `${baseUrl}/${fileKey}` : upload.Location;\n } else {\n // Default protocol to https protocol\n file.url = `https://${upload.Location}`;\n }\n };\n\n return {\n isPrivate() {\n return config.params.ACL === 'private';\n },\n\n async getSignedUrl(file: File, customParams: any): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n const fileKey = getFileKey(file);\n\n const url = await getSignedUrl(\n s3Client,\n new GetObjectCommand({\n Bucket: config.params.Bucket,\n Key: fileKey,\n ...customParams,\n }),\n {\n expiresIn: getOr(15 * 60, ['params', 'signedUrlExpires'], config),\n }\n );\n\n return { url };\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<DeleteObjectCommandOutput> {\n const command = new DeleteObjectCommand({\n Bucket: config.params.Bucket,\n Key: getFileKey(file),\n ...customParams,\n });\n return s3Client.send(command);\n },\n };\n 
},\n};\n"],"names":["index","getOr","ObjectCannedACL","S3Client","Upload","upload","getSignedUrl","GetObjectCommand","DeleteObjectCommand"],"mappings":";;;;;AAGA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EACT;AAEA,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AAMA,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IACpD;AACA,WAAO,EAAE,OAAO;AAAA,EAClB;AAEI,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAC7F;AAEM,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ;IACnB;AAEA,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC;IAC3C;AAGA,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;IAClD;AAGA,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;EAClD;AAGO,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC;AACxD;AAGa,MAAA,qBAAqB,CAAC,YAAgD;AAE7E,MAAA,QAAQ,eAAe,QAAQ,iBAAiB;AAC3C,WAAA;AAAA,MACL,aAAa,QAAQ;AAAA,MACrB,iBAAiB,QAAQ;AAAA,IAAA;AAAA,EAE7B;AAEA,MAAI,QAAQ,WAAW,eAAe,QAAQ,UAAU,iBAAiB;AAC/D,YAAA;AAAA,MACN;AAAA,IAAA;AAEK,WAAA;AAAA,MACL,aAAa,QAAQ,UAAU;AAAA,MAC/B,iBAAiB,QAAQ,UAAU;AAAA,IAAA;AAAA,EAEvC;AAEI,MAAA,QAAQ,WAAW,aAAa;AAC3B,WAAA;AAAA,MACL,aAAa,QAAQ,UAAU,YAAY;AAAA,MAC3C,iBAAiB,QAAQ,UAAU,YAAY;AAAA,IAAA;AAAA,EAEnD;AAEM,QAAA,IAAI,MAAM,gCAAgC;AAClD;ACnDA,MAAM,oBAAoB,CAAC,QAAgB;AAElC,SAAA,YAAY,KAAK,GAAG;AAC7B;AAEA,MAAM,YAAY,CAAC,EAAE,SAAS,UAAU,WAAW,GAAG,sBAAmC;AACvF,MAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,YAAA;AAAA,MACN;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,SAAS;AAAA,IACb,GAAG;AAAA,IACH,GAAG;AAAA,IACH,aAAa,mBAAmB,EAAE,WAAW,GAAG,iBAAiB;AAAA,EAAA;AAG5D,SAAA,OAAO,MAAMC,GAAAA,MAAMC,SAAA,gBAAgB,aAAa,CAAC,UAAU,KAAK,GAAG,MAAM;AAEzE,SAAA;AACT;AAEA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AAEhE,UAAA,SAAS,UAAU,EAAE,SAAS,UAAU,WAAW,GAAG,iBAAiB;AACvE,UAAA,WAAW,IAAIC,kBAAS,MAAM;AAC9B,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AACpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IAAA;AAGpD,UAAM,SAAS,OAAO,MAAY,eAA+C,CAAA,MAAO;AAChF,YAAA,UAAU,WAAW,IAAI;AACzB,YAAA,YAAY,IAAIC,kBAAO;AAAA,QAC3B,QAAQ;AAAA,QACR,QAAQ;AAAA,UACN,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,KAAK,UAAU,OAAO,KAAK,KAAK,QAAe,QAAQ;AAAA,UAC7D,KAAK,OAAO,OAAO;AAAA,UACnB,aAAa,KAAK;AAAA,UAClB,GAAG;AAAA,QACL;AAAA,MAAA,CACD;AAEKC,YAAAA,UAAU,MAAM,UAAU;AAE5B,UAAA,kBAAkBA,QAAO,QAAQ,GAAG;AACtC,aAAK,MAAM,UAAU,GAAG,OAAO,IAAI,OAAO,KAAKA,QAAO;AAAA,MAAA,OACjD;AAEA,aAAA,MAAM,WAAWA,QAAO,QAAQ;AAAA,MACvC;AAAA,IAAA;AAGK,WAAA;AAAA,MACL,YAAY;AACH,eAAA,OAAO,OAAO,QAAQ;AAAA,MAC/B;AAAA,MAEA,MAAM,aAAa,MAAY,cAA6C;AAEtE,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK;QACrB;AACM,cAAA,UAAU,WAAW,IAAI;AAE/B,cAAM,MAAM,MAAMC,mBAAA;AAAA,UAChB;AAAA,UACA,IAAIC,0BAAiB;AAAA,YACnB,QAAQ,OAAO,OAAO;AAAA,YACtB,KAAK;AAAA,YACL,GAAG;AAAA,UAAA,CACJ;AAAA,UACD;AAAA,YACE,WAAWN,SAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAAA,UAClE;AAAA,QAAA;AAGF,eAAO,EAAE,IAAI;AAAA,MACf;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAwC;AAClE,cAAA,UAAU,IAAIO,6BAAoB;AAAA,UAC
tC,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK,WAAW,IAAI;AAAA,UACpB,GAAG;AAAA,QAAA,CACJ;AACM,eAAA,SAAS,KAAK,OAAO;AAAA,MAC9B;AAAA,IAAA;AAAA,EAEJ;AACF;;"}
package/dist/index.mjs
CHANGED
@@ -1,5 +1,7 @@
 import { getOr } from "lodash/fp";
-import
+import { S3Client, GetObjectCommand, DeleteObjectCommand, ObjectCannedACL } from "@aws-sdk/client-s3";
+import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
+import { Upload } from "@aws-sdk/lib-storage";
 const ENDPOINT_PATTERN = /^(.+\.)?s3[.-]([a-z0-9-]+)\./;
 function isUrlFromBucket(fileUrl, bucketName, baseUrl = "") {
 const url = new URL(fileUrl);
@@ -44,81 +46,97 @@ function getBucketFromAwsUrl(fileUrl) {
 }
 return { bucket: prefix.substring(0, prefix.length - 1) };
 }
-
-
+const extractCredentials = (options) => {
+if (options.accessKeyId && options.secretAccessKey) {
+return {
+accessKeyId: options.accessKeyId,
+secretAccessKey: options.secretAccessKey
+};
+}
+if (options.s3Options?.accessKeyId && options.s3Options.secretAccessKey) {
+process.emitWarning(
+"Credentials passed directly to s3Options is deprecated and will be removed in a future release. Please wrap them inside a credentials object."
+);
+return {
+accessKeyId: options.s3Options.accessKeyId,
+secretAccessKey: options.s3Options.secretAccessKey
+};
+}
+if (options.s3Options?.credentials) {
+return {
+accessKeyId: options.s3Options.credentials.accessKeyId,
+secretAccessKey: options.s3Options.credentials.secretAccessKey
+};
+}
+throw new Error("Couldn't find AWS credentials.");
+};
+const assertUrlProtocol = (url) => {
 return /^\w*:\/\//.test(url);
-}
+};
+const getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }) => {
+if (Object.keys(legacyS3Options).length > 0) {
+process.emitWarning(
+"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
+);
+}
+const config = {
+...s3Options,
+...legacyS3Options,
+credentials: extractCredentials({ s3Options, ...legacyS3Options })
+};
+config.params.ACL = getOr(ObjectCannedACL.public_read, ["params", "ACL"], config);
+return config;
+};
 const index = {
 init({ baseUrl, rootPath, s3Options, ...legacyS3Options }) {
-
-
-"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
-);
-}
-const config = { ...s3Options, ...legacyS3Options };
-const S3 = new AWS.S3({
-apiVersion: "2006-03-01",
-...config
-});
+const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });
+const s3Client = new S3Client(config);
 const filePrefix = rootPath ? `${rootPath.replace(/\/+$/, "")}/` : "";
 const getFileKey = (file) => {
 const path = file.path ? `${file.path}/` : "";
 return `${filePrefix}${path}${file.hash}${file.ext}`;
 };
-const
-const upload = (file, customParams = {}) => new Promise((resolve, reject) => {
+const upload = async (file, customParams = {}) => {
 const fileKey = getFileKey(file);
-
-
-
-
-
-
-
-
-
-ContentType: file.mime,
-...customParams
-};
-const onUploaded = (err, data) => {
-if (err) {
-return reject(err);
-}
-if (baseUrl) {
-file.url = `${baseUrl}/${fileKey}`;
-} else {
-file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;
+const uploadObj = new Upload({
+client: s3Client,
+params: {
+Bucket: config.params.Bucket,
+Key: fileKey,
+Body: file.stream || Buffer.from(file.buffer, "binary"),
+ACL: config.params.ACL,
+ContentType: file.mime,
+...customParams
 }
-
-
-
-
+});
+const upload2 = await uploadObj.done();
+if (assertUrlProtocol(upload2.Location)) {
+file.url = baseUrl ? `${baseUrl}/${fileKey}` : upload2.Location;
+} else {
+file.url = `https://${upload2.Location}`;
+}
+};
 return {
 isPrivate() {
-return ACL === "private";
+return config.params.ACL === "private";
 },
-async getSignedUrl(file) {
+async getSignedUrl(file, customParams) {
 if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {
 return { url: file.url };
 }
-const
-
-
-
-
-
-
-
-
-
-
-
-
-}
-resolve({ url });
-}
-);
-});
+const fileKey = getFileKey(file);
+const url = await getSignedUrl(
+s3Client,
+new GetObjectCommand({
+Bucket: config.params.Bucket,
+Key: fileKey,
+...customParams
+}),
+{
+expiresIn: getOr(15 * 60, ["params", "signedUrlExpires"], config)
+}
+);
+return { url };
 },
 uploadStream(file, customParams = {}) {
 return upload(file, customParams);
@@ -127,22 +145,12 @@ const index = {
 return upload(file, customParams);
 },
 delete(file, customParams = {}) {
-
-
-
-
-Key: fileKey,
-Bucket: config.params.Bucket,
-...customParams
-},
-(err) => {
-if (err) {
-return reject(err);
-}
-resolve();
-}
-);
+const command = new DeleteObjectCommand({
+Bucket: config.params.Bucket,
+Key: getFileKey(file),
+...customParams
 });
+return s3Client.send(command);
 }
 };
 }
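
The ESM build mirrors the CommonJS changes: signing now goes through getSignedUrl from @aws-sdk/s3-request-presigner with a GetObjectCommand and a numeric expiresIn, and delete() sends a DeleteObjectCommand and returns its output. A small sketch of those two calls; the bucket and key are placeholders, not values from this diff:

// Sketch of the presign and delete patterns used above; placeholder bucket/key.
import { DeleteObjectCommand, GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

async function presign(client: S3Client, key: string): Promise<string> {
  return getSignedUrl(client, new GetObjectCommand({ Bucket: 'my-bucket', Key: key }), {
    expiresIn: 15 * 60, // seconds; the provider falls back to 15 minutes
  });
}

async function removeObject(client: S3Client, key: string) {
  // client.send resolves with DeleteObjectCommandOutput, matching the new delete() signature.
  return client.send(new DeleteObjectCommand({ Bucket: 'my-bucket', Key: key }));
}
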
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["const ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport AWS from 'aws-sdk';\nimport { isUrlFromBucket } from './utils';\n\ninterface File {\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\n// TODO V5: Migrate to aws-sdk v3\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nrequire('aws-sdk/lib/maintenance_mode_message').suppress = true;\n\nfunction hasUrlProtocol(url: string) {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n}\n\ninterface InitOptions extends Partial<AWS.S3.ClientConfiguration> {\n baseUrl?: string;\n rootPath?: string;\n s3Options: AWS.S3.ClientConfiguration & {\n 
params: {\n Bucket: string; // making it required\n ACL?: string;\n signedUrlExpires?: string;\n };\n };\n}\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n\n const config = { ...s3Options, ...legacyS3Options };\n\n const S3 = new AWS.S3({\n apiVersion: '2006-03-01',\n ...config,\n });\n\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? `${file.path}/` : '';\n\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const ACL = getOr('public-read', ['params', 'ACL'], config);\n\n const upload = (file: File, customParams = {}): Promise<void> =>\n new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n if (!file.stream && !file.buffer) {\n reject(new Error('Missing file stream or buffer'));\n return;\n }\n\n const params = {\n Key: fileKey,\n Bucket: config.params.Bucket,\n Body: file.stream || file.buffer,\n ACL,\n ContentType: file.mime,\n ...customParams,\n };\n\n const onUploaded = (err: Error, data: AWS.S3.ManagedUpload.SendData) => {\n if (err) {\n return reject(err);\n }\n\n // set the bucket file url\n if (baseUrl) {\n // Construct the url with the baseUrl\n file.url = `${baseUrl}/${fileKey}`;\n } else {\n // Add the protocol if it is missing\n // Some providers like DigitalOcean Spaces return the url without the protocol\n file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;\n }\n resolve();\n };\n\n S3.upload(params, onUploaded);\n });\n\n return {\n isPrivate() {\n return ACL === 'private';\n },\n async getSignedUrl(file: File): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n\n const signedUrlExpires: string = getOr(15 * 60, ['params', 'signedUrlExpires'], config); // 15 minutes\n\n return new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n S3.getSignedUrl(\n 'getObject',\n {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Expires: parseInt(signedUrlExpires, 10),\n },\n (err, url) => {\n if (err) {\n return reject(err);\n }\n resolve({ url });\n }\n );\n });\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<void> {\n return new Promise((resolve, reject) => {\n // delete file on S3 bucket\n const fileKey = getFileKey(file);\n S3.deleteObject(\n {\n Key: fileKey,\n Bucket: config.params.Bucket,\n ...customParams,\n },\n (err) => {\n if (err) {\n return reject(err);\n }\n\n resolve();\n }\n );\n });\n },\n };\n 
},\n};\n"],"names":["index"],"mappings":";;AAAA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EACT;AAEA,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AAMA,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IACpD;AACA,WAAO,EAAE,OAAO;AAAA,EAClB;AAEI,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAC7F;AAEM,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ;IACnB;AAEA,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC;IAC3C;AAGA,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;IAClD;AAGA,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;EAClD;AAGO,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC;AACxD;AC1DA,QAAQ,sCAAsC,EAAE,WAAW;AAE3D,SAAS,eAAe,KAAa;AAE5B,SAAA,YAAY,KAAK,GAAG;AAC7B;AAcA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AACtE,QAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,cAAA;AAAA,QACN;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,SAAS,EAAE,GAAG,WAAW,GAAG,gBAAgB;AAE5C,UAAA,KAAK,IAAI,IAAI,GAAG;AAAA,MACpB,YAAY;AAAA,MACZ,GAAG;AAAA,IAAA,CACJ;AAEK,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AAEpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IAAA;AAGpD,UAAM,MAAM,MAAM,eAAe,CAAC,UAAU,KAAK,GAAG,MAAM;AAEpD,UAAA,SAAS,CAAC,MAAY,eAAe,OACzC,IAAI,QAAQ,CAAC,SAAS,WAAW;AACzB,YAAA,UAAU,WAAW,IAAI;AAE/B,UAAI,CAAC,KAAK,UAAU,CAAC,KAAK,QAAQ;AACzB,eAAA,IAAI,MAAM,+BAA+B,CAAC;AACjD;AAAA,MACF;AAEA,YAAM,SAAS;AAAA,QACb,KAAK;AAAA,QACL,QAAQ,OAAO,OAAO;AAAA,QACtB,MAAM,KAAK,UAAU,KAAK;AAAA,QAC1B;AAAA,QACA,aAAa,KAAK;AAAA,QAClB,GAAG;AAAA,MAAA;AAGC,YAAA,aAAa,CAAC,KAAY,SAAwC;AACtE,YAAI,KAAK;AACP,iBAAO,OAAO,GAAG;AAAA,QACnB;AAGA,YAAI,SAAS;AAEX,eAAK,MAAM,GAAG,OAAO,IAAI,OAAO;AAAA,QAAA,OAC3B;AAGA,eAAA,MAAM,eAAe,KAAK,QAAQ,IAAI,KAAK,WAAW,WAAW,KAAK,QAAQ;AAAA,QACrF;AACQ;MAAA;AAGP,SAAA,OAAO,QAAQ,UAAU;AAAA,IAAA,CAC7B;AAEI,WAAA;AAAA,MACL,YAAY;AACV,eAAO,QAAQ;AAAA,MACjB;AAAA,MACA,MAAM,aAAa,MAAsC;AAEnD,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK;QACrB;AAEM,cAAA,mBAA2B,MAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAEtF,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChC,gBAAA,UAAU,WAAW,IAAI;AAE5B,aAAA;AAAA,YACD;AAAA,YACA;AAAA,cACE,QAAQ,OAAO,OAAO;AAAA,cACtB,KAAK;AAAA,cACL,SAAS,SAAS,kBAAkB,EAAE;AAAA,YACxC;AAAA,YACA,CAAC,KAAK,QAAQ;AACZ,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AACQ,sBAAA,EAAE,KAAK;AAAA,YACjB;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAmB;AACnD,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAEhC,gBAAA,UAAU,WAAW,IAAI;AAC5B,aAAA;AAAA,YACD;AAAA,cACE,KAAK;AAAA,cACL,QAAQ,OAAO,OAAO;AAAA,cACtB,GAAG;AAAA,YACL;AAAA,YACA,CAAC,QAAQ;AACP,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AAEQ;YACV;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,IAAA;AAAA,EAEJ;AACF;"}
+
{"version":3,"file":"index.mjs","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["import type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport type { InitOptions } from '.';\n\nconst ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n\n// TODO Remove this in V5 since we will only support the new config structure\nexport const extractCredentials = (options: InitOptions): AwsCredentialIdentity => {\n // legacy\n if (options.accessKeyId && options.secretAccessKey) {\n return {\n accessKeyId: options.accessKeyId,\n secretAccessKey: options.secretAccessKey,\n };\n }\n // Legacy\n if (options.s3Options?.accessKeyId && options.s3Options.secretAccessKey) {\n process.emitWarning(\n 'Credentials passed directly to s3Options is deprecated and will be removed in a future release. 
Please wrap them inside a credentials object.'\n );\n return {\n accessKeyId: options.s3Options.accessKeyId,\n secretAccessKey: options.s3Options.secretAccessKey,\n };\n }\n // V5\n if (options.s3Options?.credentials) {\n return {\n accessKeyId: options.s3Options.credentials.accessKeyId,\n secretAccessKey: options.s3Options.credentials.secretAccessKey,\n };\n }\n\n throw new Error(\"Couldn't find AWS credentials.\");\n};\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport {\n S3Client,\n GetObjectCommand,\n DeleteObjectCommand,\n DeleteObjectCommandOutput,\n PutObjectCommandInput,\n CompleteMultipartUploadCommandOutput,\n AbortMultipartUploadCommandOutput,\n S3ClientConfig,\n ObjectCannedACL,\n} from '@aws-sdk/client-s3';\nimport type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport { getSignedUrl } from '@aws-sdk/s3-request-presigner';\nimport { Upload } from '@aws-sdk/lib-storage';\nimport { extractCredentials, isUrlFromBucket } from './utils';\n\nexport interface File {\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\nexport type UploadCommandOutput = (\n | CompleteMultipartUploadCommandOutput\n | AbortMultipartUploadCommandOutput\n) & {\n Location: string;\n};\n\nexport interface AWSParams {\n Bucket: string; // making it required\n ACL?: ObjectCannedACL;\n signedUrlExpires?: number;\n}\n\nexport interface DefaultOptions extends S3ClientConfig {\n // TODO Remove this in V5\n accessKeyId?: AwsCredentialIdentity['accessKeyId'];\n secretAccessKey?: AwsCredentialIdentity['secretAccessKey'];\n // Keep this for V5\n credentials?: AwsCredentialIdentity;\n params?: AWSParams;\n [k: string]: any;\n}\n\nexport type InitOptions = (DefaultOptions | { s3Options: DefaultOptions }) & {\n baseUrl?: string;\n rootPath?: string;\n [k: string]: any;\n};\n\nconst assertUrlProtocol = (url: string) => {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n};\n\nconst getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) => {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n\n const config = {\n ...s3Options,\n ...legacyS3Options,\n credentials: extractCredentials({ s3Options, ...legacyS3Options }),\n };\n\n config.params.ACL = getOr(ObjectCannedACL.public_read, ['params', 'ACL'], config);\n\n return config;\n};\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n // TODO V5 change config structure to avoid having to do this\n const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });\n const s3Client = new S3Client(config);\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? 
`${file.path}/` : '';\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const upload = async (file: File, customParams: Partial<PutObjectCommandInput> = {}) => {\n const fileKey = getFileKey(file);\n const uploadObj = new Upload({\n client: s3Client,\n params: {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Body: file.stream || Buffer.from(file.buffer as any, 'binary'),\n ACL: config.params.ACL,\n ContentType: file.mime,\n ...customParams,\n },\n });\n\n const upload = (await uploadObj.done()) as UploadCommandOutput;\n\n if (assertUrlProtocol(upload.Location)) {\n file.url = baseUrl ? `${baseUrl}/${fileKey}` : upload.Location;\n } else {\n // Default protocol to https protocol\n file.url = `https://${upload.Location}`;\n }\n };\n\n return {\n isPrivate() {\n return config.params.ACL === 'private';\n },\n\n async getSignedUrl(file: File, customParams: any): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n const fileKey = getFileKey(file);\n\n const url = await getSignedUrl(\n s3Client,\n new GetObjectCommand({\n Bucket: config.params.Bucket,\n Key: fileKey,\n ...customParams,\n }),\n {\n expiresIn: getOr(15 * 60, ['params', 'signedUrlExpires'], config),\n }\n );\n\n return { url };\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<DeleteObjectCommandOutput> {\n const command = new DeleteObjectCommand({\n Bucket: config.params.Bucket,\n Key: getFileKey(file),\n ...customParams,\n });\n return s3Client.send(command);\n },\n };\n 
},\n};\n"],"names":["index","upload"],"mappings":";;;;AAGA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EACT;AAEA,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AAMA,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IACpD;AACA,WAAO,EAAE,OAAO;AAAA,EAClB;AAEI,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAC7F;AAEM,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ;IACnB;AAEA,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC;IAC3C;AAGA,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;IAClD;AAGA,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;EAClD;AAGO,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC;AACxD;AAGa,MAAA,qBAAqB,CAAC,YAAgD;AAE7E,MAAA,QAAQ,eAAe,QAAQ,iBAAiB;AAC3C,WAAA;AAAA,MACL,aAAa,QAAQ;AAAA,MACrB,iBAAiB,QAAQ;AAAA,IAAA;AAAA,EAE7B;AAEA,MAAI,QAAQ,WAAW,eAAe,QAAQ,UAAU,iBAAiB;AAC/D,YAAA;AAAA,MACN;AAAA,IAAA;AAEK,WAAA;AAAA,MACL,aAAa,QAAQ,UAAU;AAAA,MAC/B,iBAAiB,QAAQ,UAAU;AAAA,IAAA;AAAA,EAEvC;AAEI,MAAA,QAAQ,WAAW,aAAa;AAC3B,WAAA;AAAA,MACL,aAAa,QAAQ,UAAU,YAAY;AAAA,MAC3C,iBAAiB,QAAQ,UAAU,YAAY;AAAA,IAAA;AAAA,EAEnD;AAEM,QAAA,IAAI,MAAM,gCAAgC;AAClD;ACnDA,MAAM,oBAAoB,CAAC,QAAgB;AAElC,SAAA,YAAY,KAAK,GAAG;AAC7B;AAEA,MAAM,YAAY,CAAC,EAAE,SAAS,UAAU,WAAW,GAAG,sBAAmC;AACvF,MAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,YAAA;AAAA,MACN;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,SAAS;AAAA,IACb,GAAG;AAAA,IACH,GAAG;AAAA,IACH,aAAa,mBAAmB,EAAE,WAAW,GAAG,iBAAiB;AAAA,EAAA;AAG5D,SAAA,OAAO,MAAM,MAAM,gBAAgB,aAAa,CAAC,UAAU,KAAK,GAAG,MAAM;AAEzE,SAAA;AACT;AAEA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AAEhE,UAAA,SAAS,UAAU,EAAE,SAAS,UAAU,WAAW,GAAG,iBAAiB;AACvE,UAAA,WAAW,IAAI,SAAS,MAAM;AAC9B,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AACpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IAAA;AAGpD,UAAM,SAAS,OAAO,MAAY,eAA+C,CAAA,MAAO;AAChF,YAAA,UAAU,WAAW,IAAI;AACzB,YAAA,YAAY,IAAI,OAAO;AAAA,QAC3B,QAAQ;AAAA,QACR,QAAQ;AAAA,UACN,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,KAAK,UAAU,OAAO,KAAK,KAAK,QAAe,QAAQ;AAAA,UAC7D,KAAK,OAAO,OAAO;AAAA,UACnB,aAAa,KAAK;AAAA,UAClB,GAAG;AAAA,QACL;AAAA,MAAA,CACD;AAEKC,YAAAA,UAAU,MAAM,UAAU;AAE5B,UAAA,kBAAkBA,QAAO,QAAQ,GAAG;AACtC,aAAK,MAAM,UAAU,GAAG,OAAO,IAAI,OAAO,KAAKA,QAAO;AAAA,MAAA,OACjD;AAEA,aAAA,MAAM,WAAWA,QAAO,QAAQ;AAAA,MACvC;AAAA,IAAA;AAGK,WAAA;AAAA,MACL,YAAY;AACH,eAAA,OAAO,OAAO,QAAQ;AAAA,MAC/B;AAAA,MAEA,MAAM,aAAa,MAAY,cAA6C;AAEtE,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK;QACrB;AACM,cAAA,UAAU,WAAW,IAAI;AAE/B,cAAM,MAAM,MAAM;AAAA,UAChB;AAAA,UACA,IAAI,iBAAiB;AAAA,YACnB,QAAQ,OAAO,OAAO;AAAA,YACtB,KAAK;AAAA,YACL,GAAG;AAAA,UAAA,CACJ;AAAA,UACD;AAAA,YACE,WAAW,MAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAAA,UAClE;AAAA,QAAA;AAGF,eAAO,EAAE,IAAI;AAAA,MACf;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAwC;AAClE,cAAA,UAAU,IAAI,oBAAoB;AAAA,UACtC,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK,WAAW,IAAI;AAAA,UACpB,GAAG;AAAA,QAAA,CACJ;AACM,eAAA,SAAS,KAAK,OAAO;AAAA,MAC9B;AAAA,IAAA;AAAA,EAEJ
;AACF;"}
|
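The sources embedded in this map show the new URL-signing guard: getSignedUrl only signs a file URL when it appears to belong to the configured bucket, and never when a baseUrl (for example a CDN) is set. Below is a minimal sketch of that check, written against the isUrlFromBucket signature declared in dist/utils.d.ts further down; the import path, bucket name and URLs are assumptions for illustration, not values taken from this diff.

// Sketch only: assumes the compiled helper is reachable at this subpath.
import { isUrlFromBucket } from '@strapi/provider-upload-aws-s3/dist/utils';

const bucket = 'my-bucket'; // hypothetical bucket name

// Virtual-hosted S3 URL for the same bucket: eligible for signing.
console.log(isUrlFromBucket(`https://${bucket}.s3.eu-west-1.amazonaws.com/uploads/img.png`, bucket)); // true

// URL that belongs to a different bucket: left unsigned.
console.log(isUrlFromBucket('https://other-bucket.s3.amazonaws.com/uploads/img.png', bucket)); // false

// When a baseUrl (CDN) is configured, signing is always skipped.
console.log(isUrlFromBucket('https://cdn.example.com/uploads/img.png', bucket, 'https://cdn.example.com')); // false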
package/dist/utils.d.ts
CHANGED
|
@@ -1,2 +1,5 @@
|
|
|
1
|
+
import type { AwsCredentialIdentity } from '@aws-sdk/types';
|
|
2
|
+
import type { InitOptions } from '.';
|
|
1
3
|
export declare function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl?: string): boolean;
|
|
4
|
+
export declare const extractCredentials: (options: InitOptions) => AwsCredentialIdentity;
|
|
2
5
|
//# sourceMappingURL=utils.d.ts.map
|
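utils.d.ts now also exports extractCredentials, the helper the provider uses to normalise the three credential shapes it accepts (root-level keys, keys placed directly on s3Options, or an s3Options.credentials object). A rough usage sketch against the declared signature follows; the import path and the placeholder key values are assumptions for illustration.

// Sketch only: assumes the compiled helper is reachable at this subpath.
import { extractCredentials } from '@strapi/provider-upload-aws-s3/dist/utils';

// Preferred shape: credentials nested under s3Options.
const credentials = extractCredentials({
  s3Options: {
    credentials: {
      accessKeyId: 'AKIA-EXAMPLE',       // placeholder, not a real key
      secretAccessKey: 'example-secret', // placeholder
    },
  },
});
// -> { accessKeyId: 'AKIA-EXAMPLE', secretAccessKey: 'example-secret' }

// Legacy shapes (keys at the root, or directly on s3Options) still resolve,
// the latter with a deprecation warning; anything else throws
// "Couldn't find AWS credentials.".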
package/dist/utils.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AAC5D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,GAAG,CAAC;AASrC,wBAAgB,eAAe,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,OAAO,SAAK,GAAG,OAAO,CAoB1F;AA6DD,eAAO,MAAM,kBAAkB,YAAa,WAAW,KAAG,qBA2BzD,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@strapi/provider-upload-aws-s3",
|
|
3
|
-
"version": "4.15.
|
|
3
|
+
"version": "4.15.2",
|
|
4
4
|
"description": "AWS S3 provider for strapi upload",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"upload",
|
|
@@ -46,18 +46,21 @@
|
|
|
46
46
|
"watch": "pack-up watch"
|
|
47
47
|
},
|
|
48
48
|
"dependencies": {
|
|
49
|
-
"aws-sdk": "
|
|
49
|
+
"@aws-sdk/client-s3": "3.433.0",
|
|
50
|
+
"@aws-sdk/lib-storage": "3.433.0",
|
|
51
|
+
"@aws-sdk/s3-request-presigner": "3.433.0",
|
|
52
|
+
"@aws-sdk/types": "3.433.0",
|
|
50
53
|
"lodash": "4.17.21"
|
|
51
54
|
},
|
|
52
55
|
"devDependencies": {
|
|
53
|
-
"@strapi/pack-up": "4.15.
|
|
56
|
+
"@strapi/pack-up": "4.15.2",
|
|
54
57
|
"@types/jest": "29.5.2",
|
|
55
|
-
"eslint-config-custom": "4.15.
|
|
56
|
-
"tsconfig": "4.15.
|
|
58
|
+
"eslint-config-custom": "4.15.2",
|
|
59
|
+
"tsconfig": "4.15.2"
|
|
57
60
|
},
|
|
58
61
|
"engines": {
|
|
59
62
|
"node": ">=18.0.0 <=20.x.x",
|
|
60
63
|
"npm": ">=6.0.0"
|
|
61
64
|
},
|
|
62
|
-
"gitHead": "
|
|
65
|
+
"gitHead": "bb510893332674e2607f7d335b34dc78c599319a"
|
|
63
66
|
}
|
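The dependency swap is the headline change of this release: the monolithic aws-sdk v2 package is replaced by the modular @aws-sdk v3 clients (client-s3, lib-storage for multipart uploads, s3-request-presigner for signed URLs). Existing provider options keep working, but the deprecation warnings emitted in the source above point at the preferred layout: everything S3-related wrapped in s3Options, and access keys wrapped in a credentials object. Here is a hedged sketch of that shape for a Strapi config/plugins file; the bucket, region and environment variable names are placeholders, not values taken from this diff.

// config/plugins.ts (sketch): non-deprecated providerOptions layout.
export default ({ env }: { env: (key: string, defaultValue?: string) => string }) => ({
  upload: {
    config: {
      provider: 'aws-s3',
      providerOptions: {
        baseUrl: env('CDN_URL'),        // optional; disables URL signing for CDN-served files
        rootPath: env('CDN_ROOT_PATH'), // optional key prefix inside the bucket
        s3Options: {
          credentials: {
            accessKeyId: env('AWS_ACCESS_KEY_ID'),
            secretAccessKey: env('AWS_ACCESS_SECRET'),
          },
          region: env('AWS_REGION'),
          params: {
            Bucket: env('AWS_BUCKET'),
            ACL: 'private',            // omit to fall back to the public-read default
            signedUrlExpires: 15 * 60, // seconds; matches the provider's default
          },
        },
      },
    },
  },
});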