@strapi/provider-upload-aws-s3 0.0.0-next.f5b09a8e61e059f02784478e27c310c6290be088 → 0.0.0-next.f698d55751345c4ca87477ef683475c1a68f310a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -2,7 +2,21 @@ Copyright (c) 2015-present Strapi Solutions SAS
 
 Portions of the Strapi software are licensed as follows:
 
- * All software that resides under an "ee/" directory (the “EE Software”), if that directory exists, is licensed under the license defined in "ee/LICENSE".
+ * All software that resides under an "ee/" directory (the “EE Software”), if that directory exists, is licensed under the license defined below.
+
+ Enterprise License
+
+ If you or the company you represent has entered into a written agreement referencing the Enterprise Edition of the Strapi source code available at
+ https://github.com/strapi/strapi, then such agreement applies to your use of the Enterprise Edition of the Strapi Software. If you or the company you
+ represent is using the Enterprise Edition of the Strapi Software in connection with a subscription to our cloud offering, then the agreement you have
+ agreed to with respect to our cloud offering and the licenses included in such agreement apply to your use of the Enterprise Edition of the Strapi Software.
+ Otherwise, the Strapi Enterprise Software License Agreement (found here https://strapi.io/enterprise-terms) applies to your use of the Enterprise Edition of the Strapi Software.
+
+ BY ACCESSING OR USING THE ENTERPRISE EDITION OF THE STRAPI SOFTWARE, YOU ARE AGREEING TO BE BOUND BY THE RELEVANT REFERENCED AGREEMENT.
+ IF YOU ARE NOT AUTHORIZED TO ACCEPT THESE TERMS ON BEHALF OF THE COMPANY YOU REPRESENT OR IF YOU DO NOT AGREE TO ALL OF THE RELEVANT TERMS AND CONDITIONS REFERENCED AND YOU
+ HAVE NOT OTHERWISE EXECUTED A WRITTEN AGREEMENT WITH STRAPI, YOU ARE NOT AUTHORIZED TO ACCESS OR USE OR ALLOW ANY USER TO ACCESS OR USE ANY PART OF
+ THE ENTERPRISE EDITION OF THE STRAPI SOFTWARE. YOUR ACCESS RIGHTS ARE CONDITIONAL ON YOUR CONSENT TO THE RELEVANT REFERENCED TERMS TO THE EXCLUSION OF ALL OTHER TERMS;
+ IF THE RELEVANT REFERENCED TERMS ARE CONSIDERED AN OFFER BY YOU, ACCEPTANCE IS EXPRESSLY LIMITED TO THE RELEVANT REFERENCED TERMS.
 
 * All software outside of the above-mentioned directories or restrictions above is available under the "MIT Expat" license as set forth below.
 
@@ -18,5 +32,6 @@ furnished to do so, subject to the following conditions:
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
 
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package/README.md CHANGED
@@ -49,8 +49,10 @@ module.exports = ({ env }) => ({
 baseUrl: env('CDN_URL'),
 rootPath: env('CDN_ROOT_PATH'),
 s3Options: {
- accessKeyId: env('AWS_ACCESS_KEY_ID'),
- secretAccessKey: env('AWS_ACCESS_SECRET'),
+ credentials: {
+ accessKeyId: env('AWS_ACCESS_KEY_ID'),
+ secretAccessKey: env('AWS_ACCESS_SECRET'),
+ },
 region: env('AWS_REGION'),
 params: {
 ACL: env('AWS_ACL', 'public-read'),
@@ -87,8 +89,10 @@ module.exports = ({ env }) => ({
 config: {
 provider: 'aws-s3',
 providerOptions: {
- accessKeyId: env('AWS_ACCESS_KEY_ID'),
- secretAccessKey: env('AWS_ACCESS_SECRET'),
+ credentials: {
+ accessKeyId: env('AWS_ACCESS_KEY_ID'),
+ secretAccessKey: env('AWS_ACCESS_SECRET'),
+ },
 region: env('AWS_REGION'),
 params: {
 ACL: 'private', // <== set ACL to private
@@ -109,7 +113,7 @@ module.exports = ({ env }) => ({
 
 #### Configuration for S3 compatible services
 
- This plugin may work with S3 compatible services by using the `endpoint` option instead of `region`. Scaleway example:
+ This plugin may work with S3 compatible services by using the `endpoint` option. Scaleway example:
 `./config/plugins.js`
 
 ```js
@@ -119,9 +123,12 @@ module.exports = ({ env }) => ({
 config: {
 provider: 'aws-s3',
 providerOptions: {
- accessKeyId: env('SCALEWAY_ACCESS_KEY_ID'),
- secretAccessKey: env('SCALEWAY_ACCESS_SECRET'),
- endpoint: env('SCALEWAY_ENDPOINT'), // e.g. "s3.fr-par.scw.cloud"
+ credentials: {
+ accessKeyId: env('SCALEWAY_ACCESS_KEY_ID'),
+ secretAccessKey: env('SCALEWAY_ACCESS_SECRET'),
+ },
+ region: env('SCALEWAY_REGION'), // e.g. "fr-par"
+ endpoint: env('SCALEWAY_ENDPOINT'), // e.g. "https://s3.fr-par.scw.cloud"
 params: {
 Bucket: env('SCALEWAY_BUCKET'),
 },
@@ -171,7 +178,7 @@ module.exports = [
 ];
 ```
 
- If you use dots in your bucket name, the url of the ressource is in directory style (`s3.yourRegion.amazonaws.com/your.bucket.name/image.jpg`) instead of `yourBucketName.s3.yourRegion.amazonaws.com/image.jpg`. Then only add `s3.yourRegion.amazonaws.com` to img-src and media-src directives.
+ If you use dots in your bucket name (with `forcePathStyle` set to false), the URL of the resource is in directory style (`s3.yourRegion.amazonaws.com/your.bucket.name/image.jpg`) instead of `yourBucketName.s3.yourRegion.amazonaws.com/image.jpg`. In that case, the img-src and media-src directives to add are `s3.yourRegion.amazonaws.com`, without the bucket name in the URL.
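For illustration only, the corresponding entries in `./config/middlewares.js` might look like the following sketch. It mirrors the security-middleware example that this hunk modifies; `yourRegion` is a placeholder and the rest of the middleware array is elided.

```js
// Sketch: directory-style (path-style) URLs, so only the regional S3 host is allowed.
module.exports = [
  // ...
  {
    name: 'strapi::security',
    config: {
      contentSecurityPolicy: {
        useDefaults: true,
        directives: {
          'img-src': ["'self'", 'data:', 'blob:', 's3.yourRegion.amazonaws.com'],
          'media-src': ["'self'", 'data:', 'blob:', 's3.yourRegion.amazonaws.com'],
          upgradeInsecureRequests: null,
        },
      },
    },
  },
  // ...
];
```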
 
 ## Bucket CORS Configuration
 
@@ -202,3 +209,47 @@ These are the minimum amount of permissions needed for this provider to work.
 "s3:PutObjectAcl"
 ],
 ```
+
+ ## Update to AWS SDK V3 and URL Format Change
+
+ In the recent update of the `@strapi/provider-upload-aws-s3` plugin, we have transitioned from AWS SDK V2 to AWS SDK V3. This significant update brings along a change in the format of the URLs used in Amazon S3 services.
+
+ ### Understanding the New URL Format
+
+ AWS SDK V3 adopts the virtual-hosted–style URI format for S3 URLs. This format is recommended by AWS and is likely to become required in the near future, as the path-style URI is being deprecated. More details on this format can be found in the [AWS User Guide](https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#virtual-hosted-style-access).
+
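For reference, the two styles differ only in where the bucket name appears; with a hypothetical bucket `my-bucket` in `us-east-1`:

```
Path-style (legacy):        https://s3.us-east-1.amazonaws.com/my-bucket/image.jpg
Virtual-hosted-style (new): https://my-bucket.s3.us-east-1.amazonaws.com/image.jpg
```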
+ ### Why the Change?
+
+ The move to virtual-hosted–style URIs aligns with AWS's recommendation and future-proofing strategies. For an in-depth understanding of AWS's decision behind this transition, you can refer to their detailed post [here](https://aws.amazon.com/es/blogs/aws/amazon-s3-path-deprecation-plan-the-rest-of-the-story/).
+
+ ### Configuring Your Strapi Application
+
+ If you wish to continue using the plugin with Strapi 4.15.x versions or newer without changing your URL format, it's possible to specify your desired URL format directly in the plugin's configuration. Below is an example configuration highlighting the critical `baseUrl` property:
+
+ ```javascript
+ upload: {
+ config: {
+ provider: 'aws-s3',
+ providerOptions: {
+ credentials: {
+ accessKeyId: process.env.AWS_ACCESS_KEY_ID,
+ secretAccessKey: process.env.AWS_ACCESS_SECRET,
+ },
+ region: process.env.AWS_REGION,
+ baseUrl: `https://s3.${region}.amazonaws.com/${bucket}`, // This line sets the custom url format
+ params: {
+ ACL: process.env.AWS_ACL || 'public-read',
+ signedUrlExpires: process.env.AWS_SIGNED_URL_EXPIRES || 15 * 60,
+ Bucket: process.env.AWS_BUCKET,
+ },
+ },
+ actionOptions: {
+ upload: {},
+ uploadStream: {},
+ delete: {},
+ },
+ },
+ }
+ ```
+
+ This configuration ensures compatibility with the updated AWS SDK while providing flexibility in URL format selection, catering to various user needs.
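Note that `${region}` and `${bucket}` in the snippet above are placeholders rather than variables defined in that example. One way to build the same path-style `baseUrl` from the environment variables already used there (a sketch, assuming those env names):

```js
// Hypothetical helper values derived from the env vars used in the example above.
const region = process.env.AWS_REGION; // e.g. "us-east-1"
const bucket = process.env.AWS_BUCKET; // e.g. "my-bucket"
const baseUrl = `https://s3.${region}.amazonaws.com/${bucket}`; // path-style base URL
```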
package/dist/index.d.ts CHANGED
@@ -1,8 +1,9 @@
 /// <reference types="node" />
 /// <reference types="node" />
 import type { ReadStream } from 'node:fs';
- import AWS from 'aws-sdk';
- interface File {
+ import { DeleteObjectCommandOutput, CompleteMultipartUploadCommandOutput, AbortMultipartUploadCommandOutput, S3ClientConfig, ObjectCannedACL } from '@aws-sdk/client-s3';
+ import type { AwsCredentialIdentity } from '@aws-sdk/types';
+ export interface File {
 name: string;
 alternativeText?: string;
 caption?: string;
@@ -13,6 +14,7 @@ interface File {
 ext?: string;
 mime: string;
 size: number;
+ sizeInBytes: number;
 url: string;
 previewUrl?: string;
 path?: string;
@@ -21,26 +23,37 @@ interface File {
 stream?: ReadStream;
 buffer?: Buffer;
 }
- interface InitOptions extends Partial<AWS.S3.ClientConfiguration> {
+ export type UploadCommandOutput = (CompleteMultipartUploadCommandOutput | AbortMultipartUploadCommandOutput) & {
+ Location: string;
+ };
+ export interface AWSParams {
+ Bucket: string;
+ ACL?: ObjectCannedACL;
+ signedUrlExpires?: number;
+ }
+ export interface DefaultOptions extends S3ClientConfig {
+ accessKeyId?: AwsCredentialIdentity['accessKeyId'];
+ secretAccessKey?: AwsCredentialIdentity['secretAccessKey'];
+ credentials?: AwsCredentialIdentity;
+ params?: AWSParams;
+ [k: string]: any;
+ }
+ export type InitOptions = (DefaultOptions | {
+ s3Options: DefaultOptions;
+ }) & {
 baseUrl?: string;
 rootPath?: string;
- s3Options: AWS.S3.ClientConfiguration & {
- params: {
- Bucket: string;
- ACL?: string;
- signedUrlExpires?: string;
- };
- };
- }
+ [k: string]: any;
+ };
 declare const _default: {
 init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions): {
 isPrivate(): boolean;
- getSignedUrl(file: File): Promise<{
+ getSignedUrl(file: File, customParams: any): Promise<{
 url: string;
 }>;
 uploadStream(file: File, customParams?: {}): Promise<void>;
 upload(file: File, customParams?: {}): Promise<void>;
- delete(file: File, customParams?: {}): Promise<void>;
+ delete(file: File, customParams?: {}): Promise<DeleteObjectCommandOutput>;
 };
 };
 export default _default;
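Under the new typings, `params.ACL` is an `ObjectCannedACL` and `signedUrlExpires` is a number (it was previously typed as a string). A minimal plugin-config sketch that matches the new `DefaultOptions` shape (values and env names are illustrative, not taken from the package):

```js
// Sketch only: ./config/plugins.js with options matching the new DefaultOptions typing.
module.exports = ({ env }) => ({
  upload: {
    config: {
      provider: 'aws-s3',
      providerOptions: {
        s3Options: {
          credentials: {
            accessKeyId: env('AWS_ACCESS_KEY_ID'),
            secretAccessKey: env('AWS_ACCESS_SECRET'),
          },
          region: env('AWS_REGION'),
          params: {
            Bucket: env('AWS_BUCKET'),
            ACL: 'private', // an ObjectCannedACL value
            signedUrlExpires: 15 * 60, // now a number of seconds, not a string
          },
        },
      },
    },
  },
});
```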
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAE1C,OAAO,GAAG,MAAM,SAAS,CAAC;AAG1B,UAAU,IAAI;IACZ,IAAI,EAAE,MAAM,CAAC;IACb,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAClC,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC5C,MAAM,CAAC,EAAE,UAAU,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAWD,UAAU,WAAY,SAAQ,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,mBAAmB,CAAC;IAC/D,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,GAAG,CAAC,EAAE,CAAC,mBAAmB,GAAG;QACtC,MAAM,EAAE;YACN,MAAM,EAAE,MAAM,CAAC;YACf,GAAG,CAAC,EAAE,MAAM,CAAC;YACb,gBAAgB,CAAC,EAAE,MAAM,CAAC;SAC3B,CAAC;KACH,CAAC;CACH;;+DAG4D,WAAW;;2BAkEzC,IAAI,GAAG,QAAQ;YAAE,GAAG,EAAE,MAAM,CAAA;SAAE,CAAC;2BA2BrC,IAAI;qBAGV,IAAI;qBAGJ,IAAI,sBAAsB,QAAQ,IAAI,CAAC;;;AApG1D,wBA0HE"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAE1C,OAAO,EAIL,yBAAyB,EAEzB,oCAAoC,EACpC,iCAAiC,EACjC,cAAc,EACd,eAAe,EAChB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AAK5D,MAAM,WAAW,IAAI;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAClC,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC5C,MAAM,CAAC,EAAE,UAAU,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,MAAM,mBAAmB,GAAG,CAC9B,oCAAoC,GACpC,iCAAiC,CACpC,GAAG;IACF,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,WAAW,SAAS;IACxB,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,CAAC,EAAE,eAAe,CAAC;IACtB,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,MAAM,WAAW,cAAe,SAAQ,cAAc;IAEpD,WAAW,CAAC,EAAE,qBAAqB,CAAC,aAAa,CAAC,CAAC;IACnD,eAAe,CAAC,EAAE,qBAAqB,CAAC,iBAAiB,CAAC,CAAC;IAE3D,WAAW,CAAC,EAAE,qBAAqB,CAAC;IACpC,MAAM,CAAC,EAAE,SAAS,CAAC;IACnB,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAClB;AAED,MAAM,MAAM,WAAW,GAAG,CAAC,cAAc,GAAG;IAAE,SAAS,EAAE,cAAc,CAAA;CAAE,CAAC,GAAG;IAC3E,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAClB,CAAC;;+DA0B2D,WAAW;;2BAwCzC,IAAI,gBAAgB,GAAG,GAAG,QAAQ;YAAE,GAAG,EAAE,MAAM,CAAA;SAAE,CAAC;2BAsBxD,IAAI;qBAGV,IAAI;qBAGJ,IAAI,sBAAsB,QAAQ,yBAAyB,CAAC;;;AArE/E,wBA+EE"}
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
 "use strict";
 const fp = require("lodash/fp");
- const AWS = require("aws-sdk");
- const _interopDefault = (e) => e && e.__esModule ? e : { default: e };
- const AWS__default = /* @__PURE__ */ _interopDefault(AWS);
+ const clientS3 = require("@aws-sdk/client-s3");
+ const s3RequestPresigner = require("@aws-sdk/s3-request-presigner");
+ const libStorage = require("@aws-sdk/lib-storage");
 const ENDPOINT_PATTERN = /^(.+\.)?s3[.-]([a-z0-9-]+)\./;
 function isUrlFromBucket(fileUrl, bucketName, baseUrl = "") {
 const url = new URL(fileUrl);
@@ -47,81 +47,84 @@ function getBucketFromAwsUrl(fileUrl) {
 }
 return { bucket: prefix.substring(0, prefix.length - 1) };
 }
- require("aws-sdk/lib/maintenance_mode_message").suppress = true;
- function hasUrlProtocol(url) {
+ const extractCredentials = (options) => {
+ if (options.s3Options?.credentials) {
+ return {
+ accessKeyId: options.s3Options.credentials.accessKeyId,
+ secretAccessKey: options.s3Options.credentials.secretAccessKey
+ };
+ }
+ return null;
+ };
+ const assertUrlProtocol = (url) => {
 return /^\w*:\/\//.test(url);
- }
+ };
+ const getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }) => {
+ if (Object.keys(legacyS3Options).length > 0) {
+ process.emitWarning(
+ "S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
+ );
+ }
+ const credentials = extractCredentials({ s3Options, ...legacyS3Options });
+ const config = {
+ ...s3Options,
+ ...legacyS3Options,
+ ...credentials ? { credentials } : {}
+ };
+ config.params.ACL = fp.getOr(clientS3.ObjectCannedACL.public_read, ["params", "ACL"], config);
+ return config;
+ };
 const index = {
 init({ baseUrl, rootPath, s3Options, ...legacyS3Options }) {
- if (Object.keys(legacyS3Options).length > 0) {
- process.emitWarning(
- "S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
- );
- }
- const config = { ...s3Options, ...legacyS3Options };
- const S3 = new AWS__default.default.S3({
- apiVersion: "2006-03-01",
- ...config
- });
+ const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });
+ const s3Client = new clientS3.S3Client(config);
 const filePrefix = rootPath ? `${rootPath.replace(/\/+$/, "")}/` : "";
 const getFileKey = (file) => {
 const path = file.path ? `${file.path}/` : "";
 return `${filePrefix}${path}${file.hash}${file.ext}`;
 };
- const ACL = fp.getOr("public-read", ["params", "ACL"], config);
- const upload = (file, customParams = {}) => new Promise((resolve, reject) => {
+ const upload = async (file, customParams = {}) => {
 const fileKey = getFileKey(file);
- if (!file.stream && !file.buffer) {
- reject(new Error("Missing file stream or buffer"));
- return;
- }
- const params = {
- Key: fileKey,
- Bucket: config.params.Bucket,
- Body: file.stream || file.buffer,
- ACL,
- ContentType: file.mime,
- ...customParams
- };
- const onUploaded = (err, data) => {
- if (err) {
- return reject(err);
- }
- if (baseUrl) {
- file.url = `${baseUrl}/${fileKey}`;
- } else {
- file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;
+ const uploadObj = new libStorage.Upload({
+ client: s3Client,
+ params: {
+ Bucket: config.params.Bucket,
+ Key: fileKey,
+ Body: file.stream || Buffer.from(file.buffer, "binary"),
+ ACL: config.params.ACL,
+ ContentType: file.mime,
+ ...customParams
 }
- resolve();
- };
- S3.upload(params, onUploaded);
- });
+ });
+ const upload2 = await uploadObj.done();
+ if (assertUrlProtocol(upload2.Location)) {
+ file.url = baseUrl ? `${baseUrl}/${fileKey}` : upload2.Location;
+ } else {
+ file.url = `https://${upload2.Location}`;
+ }
+ };
 return {
 isPrivate() {
- return ACL === "private";
+ return config.params.ACL === "private";
 },
- async getSignedUrl(file) {
+ async getSignedUrl(file, customParams) {
 if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {
 return { url: file.url };
 }
- const signedUrlExpires = fp.getOr(15 * 60, ["params", "signedUrlExpires"], config);
- return new Promise((resolve, reject) => {
- const fileKey = getFileKey(file);
- S3.getSignedUrl(
- "getObject",
- {
- Bucket: config.params.Bucket,
- Key: fileKey,
- Expires: parseInt(signedUrlExpires, 10)
- },
- (err, url) => {
- if (err) {
- return reject(err);
- }
- resolve({ url });
- }
- );
- });
+ const fileKey = getFileKey(file);
+ const url = await s3RequestPresigner.getSignedUrl(
+ // @ts-expect-error - TODO fix client type
+ s3Client,
+ new clientS3.GetObjectCommand({
+ Bucket: config.params.Bucket,
+ Key: fileKey,
+ ...customParams
+ }),
+ {
+ expiresIn: fp.getOr(15 * 60, ["params", "signedUrlExpires"], config)
+ }
+ );
+ return { url };
 },
 uploadStream(file, customParams = {}) {
 return upload(file, customParams);
@@ -130,22 +133,12 @@ const index = {
 return upload(file, customParams);
 },
 delete(file, customParams = {}) {
- return new Promise((resolve, reject) => {
- const fileKey = getFileKey(file);
- S3.deleteObject(
- {
- Key: fileKey,
- Bucket: config.params.Bucket,
- ...customParams
- },
- (err) => {
- if (err) {
- return reject(err);
- }
- resolve();
- }
- );
+ const command = new clientS3.DeleteObjectCommand({
+ Bucket: config.params.Bucket,
+ Key: getFileKey(file),
+ ...customParams
 });
+ return s3Client.send(command);
 }
 };
 }
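The `getConfig` helper introduced above emits a deprecation warning whenever S3 options (anything other than `baseUrl`, `rootPath`, and `s3Options`) are passed at the root of the plugin's `providerOptions`. A small sketch of how the destructuring sorts options, with placeholder values:

```js
// Sketch: which keys end up in `legacyS3Options` and trigger the warning.
const providerOptions = {
  baseUrl: 'https://cdn.example.com', // kept at the root, no warning
  rootPath: 'uploads',                // kept at the root, no warning
  region: 'us-east-1',                // legacy root-level S3 option -> deprecation warning
  s3Options: {
    credentials: { accessKeyId: 'xxx', secretAccessKey: 'xxx' }, // preferred location
    params: { Bucket: 'my-bucket' },
  },
};

// init() destructures { baseUrl, rootPath, s3Options, ...legacyS3Options }.
// Here legacyS3Options = { region: 'us-east-1' }, so process.emitWarning fires,
// and `region` is still merged into the final S3Client config by getConfig().
```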
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["const ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport AWS from 'aws-sdk';\nimport { isUrlFromBucket } from './utils';\n\ninterface File {\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\n// TODO V5: Migrate to aws-sdk v3\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nrequire('aws-sdk/lib/maintenance_mode_message').suppress = true;\n\nfunction hasUrlProtocol(url: string) {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n}\n\ninterface InitOptions extends Partial<AWS.S3.ClientConfiguration> {\n baseUrl?: string;\n rootPath?: string;\n s3Options: AWS.S3.ClientConfiguration & {\n 
params: {\n Bucket: string; // making it required\n ACL?: string;\n signedUrlExpires?: string;\n };\n };\n}\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n\n const config = { ...s3Options, ...legacyS3Options };\n\n const S3 = new AWS.S3({\n apiVersion: '2006-03-01',\n ...config,\n });\n\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? `${file.path}/` : '';\n\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const ACL = getOr('public-read', ['params', 'ACL'], config);\n\n const upload = (file: File, customParams = {}): Promise<void> =>\n new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n if (!file.stream && !file.buffer) {\n reject(new Error('Missing file stream or buffer'));\n return;\n }\n\n const params = {\n Key: fileKey,\n Bucket: config.params.Bucket,\n Body: file.stream || file.buffer,\n ACL,\n ContentType: file.mime,\n ...customParams,\n };\n\n const onUploaded = (err: Error, data: AWS.S3.ManagedUpload.SendData) => {\n if (err) {\n return reject(err);\n }\n\n // set the bucket file url\n if (baseUrl) {\n // Construct the url with the baseUrl\n file.url = `${baseUrl}/${fileKey}`;\n } else {\n // Add the protocol if it is missing\n // Some providers like DigitalOcean Spaces return the url without the protocol\n file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;\n }\n resolve();\n };\n\n S3.upload(params, onUploaded);\n });\n\n return {\n isPrivate() {\n return ACL === 'private';\n },\n async getSignedUrl(file: File): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n\n const signedUrlExpires: string = getOr(15 * 60, ['params', 'signedUrlExpires'], config); // 15 minutes\n\n return new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n S3.getSignedUrl(\n 'getObject',\n {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Expires: parseInt(signedUrlExpires, 10),\n },\n (err, url) => {\n if (err) {\n return reject(err);\n }\n resolve({ url });\n }\n );\n });\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<void> {\n return new Promise((resolve, reject) => {\n // delete file on S3 bucket\n const fileKey = getFileKey(file);\n S3.deleteObject(\n {\n Key: fileKey,\n Bucket: config.params.Bucket,\n ...customParams,\n },\n (err) => {\n if (err) {\n return reject(err);\n }\n\n resolve();\n }\n );\n });\n },\n };\n 
},\n};\n"],"names":["index","AWS","getOr"],"mappings":";;;;;AAAA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EACT;AAEA,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AAMA,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IACpD;AACA,WAAO,EAAE,OAAO;AAAA,EAClB;AAEI,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAC7F;AAEM,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ;IACnB;AAEA,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC;IAC3C;AAGA,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;IAClD;AAGA,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;EAClD;AAGO,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC;AACxD;AC1DA,QAAQ,sCAAsC,EAAE,WAAW;AAE3D,SAAS,eAAe,KAAa;AAE5B,SAAA,YAAY,KAAK,GAAG;AAC7B;AAcA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AACtE,QAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,cAAA;AAAA,QACN;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,SAAS,EAAE,GAAG,WAAW,GAAG,gBAAgB;AAE5C,UAAA,KAAK,IAAIC,aAAA,QAAI,GAAG;AAAA,MACpB,YAAY;AAAA,MACZ,GAAG;AAAA,IAAA,CACJ;AAEK,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AAEpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IAAA;AAGpD,UAAM,MAAMC,GAAAA,MAAM,eAAe,CAAC,UAAU,KAAK,GAAG,MAAM;AAEpD,UAAA,SAAS,CAAC,MAAY,eAAe,OACzC,IAAI,QAAQ,CAAC,SAAS,WAAW;AACzB,YAAA,UAAU,WAAW,IAAI;AAE/B,UAAI,CAAC,KAAK,UAAU,CAAC,KAAK,QAAQ;AACzB,eAAA,IAAI,MAAM,+BAA+B,CAAC;AACjD;AAAA,MACF;AAEA,YAAM,SAAS;AAAA,QACb,KAAK;AAAA,QACL,QAAQ,OAAO,OAAO;AAAA,QACtB,MAAM,KAAK,UAAU,KAAK;AAAA,QAC1B;AAAA,QACA,aAAa,KAAK;AAAA,QAClB,GAAG;AAAA,MAAA;AAGC,YAAA,aAAa,CAAC,KAAY,SAAwC;AACtE,YAAI,KAAK;AACP,iBAAO,OAAO,GAAG;AAAA,QACnB;AAGA,YAAI,SAAS;AAEX,eAAK,MAAM,GAAG,OAAO,IAAI,OAAO;AAAA,QAAA,OAC3B;AAGA,eAAA,MAAM,eAAe,KAAK,QAAQ,IAAI,KAAK,WAAW,WAAW,KAAK,QAAQ;AAAA,QACrF;AACQ;MAAA;AAGP,SAAA,OAAO,QAAQ,UAAU;AAAA,IAAA,CAC7B;AAEI,WAAA;AAAA,MACL,YAAY;AACV,eAAO,QAAQ;AAAA,MACjB;AAAA,MACA,MAAM,aAAa,MAAsC;AAEnD,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK;QACrB;AAEM,cAAA,mBAA2BA,SAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAEtF,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChC,gBAAA,UAAU,WAAW,IAAI;AAE5B,aAAA;AAAA,YACD;AAAA,YACA;AAAA,cACE,QAAQ,OAAO,OAAO;AAAA,cACtB,KAAK;AAAA,cACL,SAAS,SAAS,kBAAkB,EAAE;AAAA,YACxC;AAAA,YACA,CAAC,KAAK,QAAQ;AACZ,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AACQ,sBAAA,EAAE,KAAK;AAAA,YACjB;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAmB;AACnD,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAEhC,gBAAA,UAAU,WAAW,IAAI;AAC5B,aAAA;AAAA,YACD;AAAA,cACE,KAAK;AAAA,cACL,QAAQ,OAAO,OAAO;AAAA,cACtB,GAAG;AAAA,YACL;AAAA,YACA,CAAC,QAAQ;AACP,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AAEQ;YACV;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,IAAA;AAAA,EAEJ;AACF;;"}
+ {"version":3,"file":"index.js","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["import type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport type { InitOptions } from '.';\n\nconst ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n\nexport const extractCredentials = (options: InitOptions): AwsCredentialIdentity | null => {\n if (options.s3Options?.credentials) {\n return {\n accessKeyId: options.s3Options.credentials.accessKeyId,\n secretAccessKey: options.s3Options.credentials.secretAccessKey,\n };\n }\n return null;\n};\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport {\n S3Client,\n GetObjectCommand,\n DeleteObjectCommand,\n DeleteObjectCommandOutput,\n PutObjectCommandInput,\n CompleteMultipartUploadCommandOutput,\n AbortMultipartUploadCommandOutput,\n S3ClientConfig,\n ObjectCannedACL,\n} from '@aws-sdk/client-s3';\nimport type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport { getSignedUrl } from '@aws-sdk/s3-request-presigner';\nimport { Upload } from '@aws-sdk/lib-storage';\nimport { extractCredentials, isUrlFromBucket } from './utils';\n\nexport interface File {\n 
name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n sizeInBytes: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\nexport type UploadCommandOutput = (\n | CompleteMultipartUploadCommandOutput\n | AbortMultipartUploadCommandOutput\n) & {\n Location: string;\n};\n\nexport interface AWSParams {\n Bucket: string; // making it required\n ACL?: ObjectCannedACL;\n signedUrlExpires?: number;\n}\n\nexport interface DefaultOptions extends S3ClientConfig {\n // TODO Remove this in V5\n accessKeyId?: AwsCredentialIdentity['accessKeyId'];\n secretAccessKey?: AwsCredentialIdentity['secretAccessKey'];\n // Keep this for V5\n credentials?: AwsCredentialIdentity;\n params?: AWSParams;\n [k: string]: any;\n}\n\nexport type InitOptions = (DefaultOptions | { s3Options: DefaultOptions }) & {\n baseUrl?: string;\n rootPath?: string;\n [k: string]: any;\n};\n\nconst assertUrlProtocol = (url: string) => {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n};\n\nconst getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) => {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n const credentials = extractCredentials({ s3Options, ...legacyS3Options });\n const config = {\n ...s3Options,\n ...legacyS3Options,\n ...(credentials ? { credentials } : {}),\n };\n\n config.params.ACL = getOr(ObjectCannedACL.public_read, ['params', 'ACL'], config);\n\n return config;\n};\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n // TODO V5 change config structure to avoid having to do this\n const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });\n const s3Client = new S3Client(config);\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? `${file.path}/` : '';\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const upload = async (file: File, customParams: Partial<PutObjectCommandInput> = {}) => {\n const fileKey = getFileKey(file);\n const uploadObj = new Upload({\n client: s3Client,\n params: {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Body: file.stream || Buffer.from(file.buffer as any, 'binary'),\n ACL: config.params.ACL,\n ContentType: file.mime,\n ...customParams,\n },\n });\n\n const upload = (await uploadObj.done()) as UploadCommandOutput;\n\n if (assertUrlProtocol(upload.Location)) {\n file.url = baseUrl ? 
`${baseUrl}/${fileKey}` : upload.Location;\n } else {\n // Default protocol to https protocol\n file.url = `https://${upload.Location}`;\n }\n };\n\n return {\n isPrivate() {\n return config.params.ACL === 'private';\n },\n\n async getSignedUrl(file: File, customParams: any): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n const fileKey = getFileKey(file);\n\n const url = await getSignedUrl(\n // @ts-expect-error - TODO fix client type\n s3Client,\n new GetObjectCommand({\n Bucket: config.params.Bucket,\n Key: fileKey,\n ...customParams,\n }),\n {\n expiresIn: getOr(15 * 60, ['params', 'signedUrlExpires'], config),\n }\n );\n\n return { url };\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<DeleteObjectCommandOutput> {\n const command = new DeleteObjectCommand({\n Bucket: config.params.Bucket,\n Key: getFileKey(file),\n ...customParams,\n });\n return s3Client.send(command);\n },\n };\n },\n};\n"],"names":["index","getOr","ObjectCannedACL","S3Client","Upload","upload","getSignedUrl","GetObjectCommand","DeleteObjectCommand"],"mappings":";;;;;AAGA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EAAA;AAGT,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EAAA;AAOpB,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IAAA;AAEpD,WAAO,EAAE,OAAO;AAAA,EAAA;AAGd,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EAAA;AAGtD,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAAA;AAGvF,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ,KAAK;AAAA,IAAA;AAGxB,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC,EAAE;AAAA,IAAA;AAI7C,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK,EAAE;AAAA,IAAA;AAIpD,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK,EAAE;AAAA,EAAA;AAI7C,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC,EAAE;AAC1D;AAEa,MAAA,qBAAqB,CAAC,YAAuD;AACpF,MAAA,QAAQ,WAAW,aAAa;AAC3B,WAAA;AAAA,MACL,aAAa,QAAQ,UAAU,YAAY;AAAA,MAC3C,iBAAiB,QAAQ,UAAU,YAAY;AAAA,IACjD;AAAA,EAAA;AAEK,SAAA;AACT;AC9BA,MAAM,oBAAoB,CAAC,QAAgB;AAElC,SAAA,YAAY,KAAK,GAAG;AAC7B;AAEA,MAAM,YAAY,CAAC,EAAE,SAAS,UAAU,WAAW,GAAG,sBAAmC;AACvF,MAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,YAAA;AAAA,MACN;AAAA,IACF;AAAA,EAAA;AAEF,QAAM,cAAc,mBAAmB,EAAE,WAAW,GAAG,iBAAiB;AACxE,QAAM,SAAS;AAAA,IACb,GAAG;AAAA,IACH,GAAG;AAAA,IACH,GAAI,cAAc,EAAE,gBAAgB,CAAA;AAAA,EACtC;AAEO,SAAA,OAAO,MAAMC,SAAMC,SAAAA,gBAAgB,aAAa,CAAC,UAAU,KAAK,GAAG,MAAM;AAEzE,SAAA;AACT;AAEA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AAEhE,UAAA,SAAS,UAAU,EAAE,SAAS,UAAU,WAAW,GAAG,iBAAiB;AACvE,UAAA,WAAW,IAAIC,SAAA,SAAS,MAAM;AAC9B,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AACpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IACpD;AAEA,UAAM,SAAS,OAAO,MAAY,eAA+C,CAAA,MAAO;AAChF,YAAA,UAAU,WAAW,IAAI;AACzB,YAAA,YAAY,IAAIC,kBAAO;AAAA,QAC3B,QAAQ;AAAA,QACR,QA
AQ;AAAA,UACN,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,KAAK,UAAU,OAAO,KAAK,KAAK,QAAe,QAAQ;AAAA,UAC7D,KAAK,OAAO,OAAO;AAAA,UACnB,aAAa,KAAK;AAAA,UAClB,GAAG;AAAA,QAAA;AAAA,MACL,CACD;AAEKC,YAAAA,UAAU,MAAM,UAAU,KAAK;AAEjC,UAAA,kBAAkBA,QAAO,QAAQ,GAAG;AACtC,aAAK,MAAM,UAAU,GAAG,OAAO,IAAI,OAAO,KAAKA,QAAO;AAAA,MAAA,OACjD;AAEA,aAAA,MAAM,WAAWA,QAAO,QAAQ;AAAA,MAAA;AAAA,IAEzC;AAEO,WAAA;AAAA,MACL,YAAY;AACH,eAAA,OAAO,OAAO,QAAQ;AAAA,MAC/B;AAAA,MAEA,MAAM,aAAa,MAAY,cAA6C;AAEtE,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK,IAAI;AAAA,QAAA;AAEnB,cAAA,UAAU,WAAW,IAAI;AAE/B,cAAM,MAAM,MAAMC,mBAAA;AAAA;AAAA,UAEhB;AAAA,UACA,IAAIC,0BAAiB;AAAA,YACnB,QAAQ,OAAO,OAAO;AAAA,YACtB,KAAK;AAAA,YACL,GAAG;AAAA,UAAA,CACJ;AAAA,UACD;AAAA,YACE,WAAWN,SAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAAA,UAAA;AAAA,QAEpE;AAEA,eAAO,EAAE,IAAI;AAAA,MACf;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAwC;AAClE,cAAA,UAAU,IAAIO,6BAAoB;AAAA,UACtC,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK,WAAW,IAAI;AAAA,UACpB,GAAG;AAAA,QAAA,CACJ;AACM,eAAA,SAAS,KAAK,OAAO;AAAA,MAAA;AAAA,IAEhC;AAAA,EAAA;AAEJ;;"}
package/dist/index.mjs CHANGED
@@ -1,5 +1,7 @@
 import { getOr } from "lodash/fp";
- import AWS from "aws-sdk";
+ import { S3Client, GetObjectCommand, DeleteObjectCommand, ObjectCannedACL } from "@aws-sdk/client-s3";
+ import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
+ import { Upload } from "@aws-sdk/lib-storage";
 const ENDPOINT_PATTERN = /^(.+\.)?s3[.-]([a-z0-9-]+)\./;
 function isUrlFromBucket(fileUrl, bucketName, baseUrl = "") {
 const url = new URL(fileUrl);
@@ -44,81 +46,84 @@ function getBucketFromAwsUrl(fileUrl) {
 }
 return { bucket: prefix.substring(0, prefix.length - 1) };
 }
- require("aws-sdk/lib/maintenance_mode_message").suppress = true;
- function hasUrlProtocol(url) {
+ const extractCredentials = (options) => {
+ if (options.s3Options?.credentials) {
+ return {
+ accessKeyId: options.s3Options.credentials.accessKeyId,
+ secretAccessKey: options.s3Options.credentials.secretAccessKey
+ };
+ }
+ return null;
+ };
+ const assertUrlProtocol = (url) => {
 return /^\w*:\/\//.test(url);
- }
+ };
+ const getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }) => {
+ if (Object.keys(legacyS3Options).length > 0) {
+ process.emitWarning(
+ "S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
+ );
+ }
+ const credentials = extractCredentials({ s3Options, ...legacyS3Options });
+ const config = {
+ ...s3Options,
+ ...legacyS3Options,
+ ...credentials ? { credentials } : {}
+ };
+ config.params.ACL = getOr(ObjectCannedACL.public_read, ["params", "ACL"], config);
+ return config;
+ };
 const index = {
 init({ baseUrl, rootPath, s3Options, ...legacyS3Options }) {
- if (Object.keys(legacyS3Options).length > 0) {
- process.emitWarning(
- "S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property."
- );
- }
- const config = { ...s3Options, ...legacyS3Options };
- const S3 = new AWS.S3({
- apiVersion: "2006-03-01",
- ...config
- });
+ const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });
+ const s3Client = new S3Client(config);
 const filePrefix = rootPath ? `${rootPath.replace(/\/+$/, "")}/` : "";
 const getFileKey = (file) => {
 const path = file.path ? `${file.path}/` : "";
 return `${filePrefix}${path}${file.hash}${file.ext}`;
 };
- const ACL = getOr("public-read", ["params", "ACL"], config);
- const upload = (file, customParams = {}) => new Promise((resolve, reject) => {
+ const upload = async (file, customParams = {}) => {
 const fileKey = getFileKey(file);
- if (!file.stream && !file.buffer) {
- reject(new Error("Missing file stream or buffer"));
- return;
- }
- const params = {
- Key: fileKey,
- Bucket: config.params.Bucket,
- Body: file.stream || file.buffer,
- ACL,
- ContentType: file.mime,
- ...customParams
- };
- const onUploaded = (err, data) => {
- if (err) {
- return reject(err);
- }
- if (baseUrl) {
- file.url = `${baseUrl}/${fileKey}`;
- } else {
- file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;
+ const uploadObj = new Upload({
+ client: s3Client,
+ params: {
+ Bucket: config.params.Bucket,
+ Key: fileKey,
+ Body: file.stream || Buffer.from(file.buffer, "binary"),
+ ACL: config.params.ACL,
+ ContentType: file.mime,
+ ...customParams
 }
- resolve();
- };
- S3.upload(params, onUploaded);
- });
+ });
+ const upload2 = await uploadObj.done();
+ if (assertUrlProtocol(upload2.Location)) {
+ file.url = baseUrl ? `${baseUrl}/${fileKey}` : upload2.Location;
+ } else {
+ file.url = `https://${upload2.Location}`;
+ }
+ };
 return {
 isPrivate() {
- return ACL === "private";
+ return config.params.ACL === "private";
 },
- async getSignedUrl(file) {
+ async getSignedUrl(file, customParams) {
 if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {
 return { url: file.url };
 }
- const signedUrlExpires = getOr(15 * 60, ["params", "signedUrlExpires"], config);
- return new Promise((resolve, reject) => {
- const fileKey = getFileKey(file);
- S3.getSignedUrl(
- "getObject",
- {
- Bucket: config.params.Bucket,
- Key: fileKey,
- Expires: parseInt(signedUrlExpires, 10)
- },
- (err, url) => {
- if (err) {
- return reject(err);
- }
- resolve({ url });
- }
- );
- });
+ const fileKey = getFileKey(file);
+ const url = await getSignedUrl(
+ // @ts-expect-error - TODO fix client type
+ s3Client,
+ new GetObjectCommand({
+ Bucket: config.params.Bucket,
+ Key: fileKey,
+ ...customParams
+ }),
+ {
+ expiresIn: getOr(15 * 60, ["params", "signedUrlExpires"], config)
+ }
+ );
+ return { url };
 },
 uploadStream(file, customParams = {}) {
 return upload(file, customParams);
@@ -127,22 +132,12 @@ const index = {
 return upload(file, customParams);
 },
 delete(file, customParams = {}) {
- return new Promise((resolve, reject) => {
- const fileKey = getFileKey(file);
- S3.deleteObject(
- {
- Key: fileKey,
- Bucket: config.params.Bucket,
- ...customParams
- },
- (err) => {
- if (err) {
- return reject(err);
- }
- resolve();
- }
- );
+ const command = new DeleteObjectCommand({
+ Bucket: config.params.Bucket,
+ Key: getFileKey(file),
+ ...customParams
 });
+ return s3Client.send(command);
 }
 };
 }
@@ -1 +1 @@
- {"version":3,"file":"index.mjs","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["const ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport AWS from 'aws-sdk';\nimport { isUrlFromBucket } from './utils';\n\ninterface File {\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\n// TODO V5: Migrate to aws-sdk v3\n// eslint-disable-next-line @typescript-eslint/no-var-requires\nrequire('aws-sdk/lib/maintenance_mode_message').suppress = true;\n\nfunction hasUrlProtocol(url: string) {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n}\n\ninterface InitOptions extends Partial<AWS.S3.ClientConfiguration> {\n baseUrl?: string;\n rootPath?: string;\n s3Options: AWS.S3.ClientConfiguration & {\n 
params: {\n Bucket: string; // making it required\n ACL?: string;\n signedUrlExpires?: string;\n };\n };\n}\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n\n const config = { ...s3Options, ...legacyS3Options };\n\n const S3 = new AWS.S3({\n apiVersion: '2006-03-01',\n ...config,\n });\n\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? `${file.path}/` : '';\n\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const ACL = getOr('public-read', ['params', 'ACL'], config);\n\n const upload = (file: File, customParams = {}): Promise<void> =>\n new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n if (!file.stream && !file.buffer) {\n reject(new Error('Missing file stream or buffer'));\n return;\n }\n\n const params = {\n Key: fileKey,\n Bucket: config.params.Bucket,\n Body: file.stream || file.buffer,\n ACL,\n ContentType: file.mime,\n ...customParams,\n };\n\n const onUploaded = (err: Error, data: AWS.S3.ManagedUpload.SendData) => {\n if (err) {\n return reject(err);\n }\n\n // set the bucket file url\n if (baseUrl) {\n // Construct the url with the baseUrl\n file.url = `${baseUrl}/${fileKey}`;\n } else {\n // Add the protocol if it is missing\n // Some providers like DigitalOcean Spaces return the url without the protocol\n file.url = hasUrlProtocol(data.Location) ? data.Location : `https://${data.Location}`;\n }\n resolve();\n };\n\n S3.upload(params, onUploaded);\n });\n\n return {\n isPrivate() {\n return ACL === 'private';\n },\n async getSignedUrl(file: File): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n\n const signedUrlExpires: string = getOr(15 * 60, ['params', 'signedUrlExpires'], config); // 15 minutes\n\n return new Promise((resolve, reject) => {\n const fileKey = getFileKey(file);\n\n S3.getSignedUrl(\n 'getObject',\n {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Expires: parseInt(signedUrlExpires, 10),\n },\n (err, url) => {\n if (err) {\n return reject(err);\n }\n resolve({ url });\n }\n );\n });\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<void> {\n return new Promise((resolve, reject) => {\n // delete file on S3 bucket\n const fileKey = getFileKey(file);\n S3.deleteObject(\n {\n Key: fileKey,\n Bucket: config.params.Bucket,\n ...customParams,\n },\n (err) => {\n if (err) {\n return reject(err);\n }\n\n resolve();\n }\n );\n });\n },\n };\n 
},\n};\n"],"names":["index"],"mappings":";;AAAA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EACT;AAEA,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EACpB;AAMA,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IACpD;AACA,WAAO,EAAE,OAAO;AAAA,EAClB;AAEI,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAC7F;AAEM,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ;IACnB;AAEA,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC;IAC3C;AAGA,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;IAClD;AAGA,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK;EAClD;AAGO,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC;AACxD;AC1DA,QAAQ,sCAAsC,EAAE,WAAW;AAE3D,SAAS,eAAe,KAAa;AAE5B,SAAA,YAAY,KAAK,GAAG;AAC7B;AAcA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AACtE,QAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,cAAA;AAAA,QACN;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,SAAS,EAAE,GAAG,WAAW,GAAG,gBAAgB;AAE5C,UAAA,KAAK,IAAI,IAAI,GAAG;AAAA,MACpB,YAAY;AAAA,MACZ,GAAG;AAAA,IAAA,CACJ;AAEK,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AAEpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IAAA;AAGpD,UAAM,MAAM,MAAM,eAAe,CAAC,UAAU,KAAK,GAAG,MAAM;AAEpD,UAAA,SAAS,CAAC,MAAY,eAAe,OACzC,IAAI,QAAQ,CAAC,SAAS,WAAW;AACzB,YAAA,UAAU,WAAW,IAAI;AAE/B,UAAI,CAAC,KAAK,UAAU,CAAC,KAAK,QAAQ;AACzB,eAAA,IAAI,MAAM,+BAA+B,CAAC;AACjD;AAAA,MACF;AAEA,YAAM,SAAS;AAAA,QACb,KAAK;AAAA,QACL,QAAQ,OAAO,OAAO;AAAA,QACtB,MAAM,KAAK,UAAU,KAAK;AAAA,QAC1B;AAAA,QACA,aAAa,KAAK;AAAA,QAClB,GAAG;AAAA,MAAA;AAGC,YAAA,aAAa,CAAC,KAAY,SAAwC;AACtE,YAAI,KAAK;AACP,iBAAO,OAAO,GAAG;AAAA,QACnB;AAGA,YAAI,SAAS;AAEX,eAAK,MAAM,GAAG,OAAO,IAAI,OAAO;AAAA,QAAA,OAC3B;AAGA,eAAA,MAAM,eAAe,KAAK,QAAQ,IAAI,KAAK,WAAW,WAAW,KAAK,QAAQ;AAAA,QACrF;AACQ;MAAA;AAGP,SAAA,OAAO,QAAQ,UAAU;AAAA,IAAA,CAC7B;AAEI,WAAA;AAAA,MACL,YAAY;AACV,eAAO,QAAQ;AAAA,MACjB;AAAA,MACA,MAAM,aAAa,MAAsC;AAEnD,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK;QACrB;AAEM,cAAA,mBAA2B,MAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAEtF,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChC,gBAAA,UAAU,WAAW,IAAI;AAE5B,aAAA;AAAA,YACD;AAAA,YACA;AAAA,cACE,QAAQ,OAAO,OAAO;AAAA,cACtB,KAAK;AAAA,cACL,SAAS,SAAS,kBAAkB,EAAE;AAAA,YACxC;AAAA,YACA,CAAC,KAAK,QAAQ;AACZ,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AACQ,sBAAA,EAAE,KAAK;AAAA,YACjB;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAmB;AACnD,eAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAEhC,gBAAA,UAAU,WAAW,IAAI;AAC5B,aAAA;AAAA,YACD;AAAA,cACE,KAAK;AAAA,cACL,QAAQ,OAAO,OAAO;AAAA,cACtB,GAAG;AAAA,YACL;AAAA,YACA,CAAC,QAAQ;AACP,kBAAI,KAAK;AACP,uBAAO,OAAO,GAAG;AAAA,cACnB;AAEQ;YACV;AAAA,UAAA;AAAA,QACF,CACD;AAAA,MACH;AAAA,IAAA;AAAA,EAEJ;AACF;"}
1
+ {"version":3,"file":"index.mjs","sources":["../src/utils.ts","../src/index.ts"],"sourcesContent":["import type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport type { InitOptions } from '.';\n\nconst ENDPOINT_PATTERN = /^(.+\\.)?s3[.-]([a-z0-9-]+)\\./;\n\ninterface BucketInfo {\n bucket?: string | null;\n err?: string;\n}\n\nexport function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl = ''): boolean {\n const url = new URL(fileUrl);\n\n // Check if the file URL is using a base URL (e.g. a CDN).\n // In this case do not sign the URL.\n if (baseUrl) {\n return false;\n }\n\n const { bucket } = getBucketFromAwsUrl(fileUrl);\n\n if (bucket) {\n return bucket === bucketName;\n }\n\n // File URL might be of an S3-compatible provider. (or an invalid URL)\n // In this case, check if the bucket name appears in the URL host or path.\n // e.g. https://minio.example.com/bucket-name/object-key\n // e.g. https://bucket.nyc3.digitaloceanspaces.com/folder/img.png\n return url.host.startsWith(`${bucketName}.`) || url.pathname.includes(`/${bucketName}/`);\n}\n\n/**\n * Parse the bucket name from a URL.\n * See all URL formats in https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-bucket-intro.html\n *\n * @param {string} fileUrl - the URL to parse\n * @returns {object} result\n * @returns {string} result.bucket - the bucket name\n * @returns {string} result.err - if any\n */\nfunction getBucketFromAwsUrl(fileUrl: string): BucketInfo {\n const url = new URL(fileUrl);\n\n // S3://<bucket-name>/<key>\n if (url.protocol === 's3:') {\n const bucket = url.host;\n\n if (!bucket) {\n return { err: `Invalid S3 url: no bucket: ${url}` };\n }\n return { bucket };\n }\n\n if (!url.host) {\n return { err: `Invalid S3 url: no hostname: ${url}` };\n }\n\n const matches = url.host.match(ENDPOINT_PATTERN);\n if (!matches) {\n return { err: `Invalid S3 url: hostname does not appear to be a valid S3 endpoint: ${url}` };\n }\n\n const prefix = matches[1];\n // https://s3.amazonaws.com/<bucket-name>\n if (!prefix) {\n if (url.pathname === '/') {\n return { bucket: null };\n }\n\n const index = url.pathname.indexOf('/', 1);\n\n // https://s3.amazonaws.com/<bucket-name>\n if (index === -1) {\n return { bucket: url.pathname.substring(1) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/\n if (index === url.pathname.length - 1) {\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://s3.amazonaws.com/<bucket-name>/key\n return { bucket: url.pathname.substring(1, index) };\n }\n\n // https://<bucket-name>.s3.amazonaws.com/\n return { bucket: prefix.substring(0, prefix.length - 1) };\n}\n\nexport const extractCredentials = (options: InitOptions): AwsCredentialIdentity | null => {\n if (options.s3Options?.credentials) {\n return {\n accessKeyId: options.s3Options.credentials.accessKeyId,\n secretAccessKey: options.s3Options.credentials.secretAccessKey,\n };\n }\n return null;\n};\n","import type { ReadStream } from 'node:fs';\nimport { getOr } from 'lodash/fp';\nimport {\n S3Client,\n GetObjectCommand,\n DeleteObjectCommand,\n DeleteObjectCommandOutput,\n PutObjectCommandInput,\n CompleteMultipartUploadCommandOutput,\n AbortMultipartUploadCommandOutput,\n S3ClientConfig,\n ObjectCannedACL,\n} from '@aws-sdk/client-s3';\nimport type { AwsCredentialIdentity } from '@aws-sdk/types';\nimport { getSignedUrl } from '@aws-sdk/s3-request-presigner';\nimport { Upload } from '@aws-sdk/lib-storage';\nimport { extractCredentials, isUrlFromBucket } from './utils';\n\nexport interface File 
{\n name: string;\n alternativeText?: string;\n caption?: string;\n width?: number;\n height?: number;\n formats?: Record<string, unknown>;\n hash: string;\n ext?: string;\n mime: string;\n size: number;\n sizeInBytes: number;\n url: string;\n previewUrl?: string;\n path?: string;\n provider?: string;\n provider_metadata?: Record<string, unknown>;\n stream?: ReadStream;\n buffer?: Buffer;\n}\n\nexport type UploadCommandOutput = (\n | CompleteMultipartUploadCommandOutput\n | AbortMultipartUploadCommandOutput\n) & {\n Location: string;\n};\n\nexport interface AWSParams {\n Bucket: string; // making it required\n ACL?: ObjectCannedACL;\n signedUrlExpires?: number;\n}\n\nexport interface DefaultOptions extends S3ClientConfig {\n // TODO Remove this in V5\n accessKeyId?: AwsCredentialIdentity['accessKeyId'];\n secretAccessKey?: AwsCredentialIdentity['secretAccessKey'];\n // Keep this for V5\n credentials?: AwsCredentialIdentity;\n params?: AWSParams;\n [k: string]: any;\n}\n\nexport type InitOptions = (DefaultOptions | { s3Options: DefaultOptions }) & {\n baseUrl?: string;\n rootPath?: string;\n [k: string]: any;\n};\n\nconst assertUrlProtocol = (url: string) => {\n // Regex to test protocol like \"http://\", \"https://\"\n return /^\\w*:\\/\\//.test(url);\n};\n\nconst getConfig = ({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) => {\n if (Object.keys(legacyS3Options).length > 0) {\n process.emitWarning(\n \"S3 configuration options passed at root level of the plugin's providerOptions is deprecated and will be removed in a future release. Please wrap them inside the 's3Options:{}' property.\"\n );\n }\n const credentials = extractCredentials({ s3Options, ...legacyS3Options });\n const config = {\n ...s3Options,\n ...legacyS3Options,\n ...(credentials ? { credentials } : {}),\n };\n\n config.params.ACL = getOr(ObjectCannedACL.public_read, ['params', 'ACL'], config);\n\n return config;\n};\n\nexport default {\n init({ baseUrl, rootPath, s3Options, ...legacyS3Options }: InitOptions) {\n // TODO V5 change config structure to avoid having to do this\n const config = getConfig({ baseUrl, rootPath, s3Options, ...legacyS3Options });\n const s3Client = new S3Client(config);\n const filePrefix = rootPath ? `${rootPath.replace(/\\/+$/, '')}/` : '';\n\n const getFileKey = (file: File) => {\n const path = file.path ? `${file.path}/` : '';\n return `${filePrefix}${path}${file.hash}${file.ext}`;\n };\n\n const upload = async (file: File, customParams: Partial<PutObjectCommandInput> = {}) => {\n const fileKey = getFileKey(file);\n const uploadObj = new Upload({\n client: s3Client,\n params: {\n Bucket: config.params.Bucket,\n Key: fileKey,\n Body: file.stream || Buffer.from(file.buffer as any, 'binary'),\n ACL: config.params.ACL,\n ContentType: file.mime,\n ...customParams,\n },\n });\n\n const upload = (await uploadObj.done()) as UploadCommandOutput;\n\n if (assertUrlProtocol(upload.Location)) {\n file.url = baseUrl ? 
`${baseUrl}/${fileKey}` : upload.Location;\n } else {\n // Default protocol to https protocol\n file.url = `https://${upload.Location}`;\n }\n };\n\n return {\n isPrivate() {\n return config.params.ACL === 'private';\n },\n\n async getSignedUrl(file: File, customParams: any): Promise<{ url: string }> {\n // Do not sign the url if it does not come from the same bucket.\n if (!isUrlFromBucket(file.url, config.params.Bucket, baseUrl)) {\n return { url: file.url };\n }\n const fileKey = getFileKey(file);\n\n const url = await getSignedUrl(\n // @ts-expect-error - TODO fix client type\n s3Client,\n new GetObjectCommand({\n Bucket: config.params.Bucket,\n Key: fileKey,\n ...customParams,\n }),\n {\n expiresIn: getOr(15 * 60, ['params', 'signedUrlExpires'], config),\n }\n );\n\n return { url };\n },\n uploadStream(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n upload(file: File, customParams = {}) {\n return upload(file, customParams);\n },\n delete(file: File, customParams = {}): Promise<DeleteObjectCommandOutput> {\n const command = new DeleteObjectCommand({\n Bucket: config.params.Bucket,\n Key: getFileKey(file),\n ...customParams,\n });\n return s3Client.send(command);\n },\n };\n },\n};\n"],"names":["index","upload"],"mappings":";;;;AAGA,MAAM,mBAAmB;AAOlB,SAAS,gBAAgB,SAAiB,YAAoB,UAAU,IAAa;AACpF,QAAA,MAAM,IAAI,IAAI,OAAO;AAI3B,MAAI,SAAS;AACJ,WAAA;AAAA,EAAA;AAGT,QAAM,EAAE,OAAA,IAAW,oBAAoB,OAAO;AAE9C,MAAI,QAAQ;AACV,WAAO,WAAW;AAAA,EAAA;AAOpB,SAAO,IAAI,KAAK,WAAW,GAAG,UAAU,GAAG,KAAK,IAAI,SAAS,SAAS,IAAI,UAAU,GAAG;AACzF;AAWA,SAAS,oBAAoB,SAA6B;AAClD,QAAA,MAAM,IAAI,IAAI,OAAO;AAGvB,MAAA,IAAI,aAAa,OAAO;AAC1B,UAAM,SAAS,IAAI;AAEnB,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,8BAA8B,GAAG,GAAG;AAAA,IAAA;AAEpD,WAAO,EAAE,OAAO;AAAA,EAAA;AAGd,MAAA,CAAC,IAAI,MAAM;AACb,WAAO,EAAE,KAAK,gCAAgC,GAAG,GAAG;AAAA,EAAA;AAGtD,QAAM,UAAU,IAAI,KAAK,MAAM,gBAAgB;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,uEAAuE,GAAG,GAAG;AAAA,EAAA;AAGvF,QAAA,SAAS,QAAQ,CAAC;AAExB,MAAI,CAAC,QAAQ;AACP,QAAA,IAAI,aAAa,KAAK;AACjB,aAAA,EAAE,QAAQ,KAAK;AAAA,IAAA;AAGxB,UAAMA,SAAQ,IAAI,SAAS,QAAQ,KAAK,CAAC;AAGzC,QAAIA,WAAU,IAAI;AAChB,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,CAAC,EAAE;AAAA,IAAA;AAI7C,QAAIA,WAAU,IAAI,SAAS,SAAS,GAAG;AACrC,aAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK,EAAE;AAAA,IAAA;AAIpD,WAAO,EAAE,QAAQ,IAAI,SAAS,UAAU,GAAGA,MAAK,EAAE;AAAA,EAAA;AAI7C,SAAA,EAAE,QAAQ,OAAO,UAAU,GAAG,OAAO,SAAS,CAAC,EAAE;AAC1D;AAEa,MAAA,qBAAqB,CAAC,YAAuD;AACpF,MAAA,QAAQ,WAAW,aAAa;AAC3B,WAAA;AAAA,MACL,aAAa,QAAQ,UAAU,YAAY;AAAA,MAC3C,iBAAiB,QAAQ,UAAU,YAAY;AAAA,IACjD;AAAA,EAAA;AAEK,SAAA;AACT;AC9BA,MAAM,oBAAoB,CAAC,QAAgB;AAElC,SAAA,YAAY,KAAK,GAAG;AAC7B;AAEA,MAAM,YAAY,CAAC,EAAE,SAAS,UAAU,WAAW,GAAG,sBAAmC;AACvF,MAAI,OAAO,KAAK,eAAe,EAAE,SAAS,GAAG;AACnC,YAAA;AAAA,MACN;AAAA,IACF;AAAA,EAAA;AAEF,QAAM,cAAc,mBAAmB,EAAE,WAAW,GAAG,iBAAiB;AACxE,QAAM,SAAS;AAAA,IACb,GAAG;AAAA,IACH,GAAG;AAAA,IACH,GAAI,cAAc,EAAE,gBAAgB,CAAA;AAAA,EACtC;AAEO,SAAA,OAAO,MAAM,MAAM,gBAAgB,aAAa,CAAC,UAAU,KAAK,GAAG,MAAM;AAEzE,SAAA;AACT;AAEA,MAAe,QAAA;AAAA,EACb,KAAK,EAAE,SAAS,UAAU,WAAW,GAAG,mBAAgC;AAEhE,UAAA,SAAS,UAAU,EAAE,SAAS,UAAU,WAAW,GAAG,iBAAiB;AACvE,UAAA,WAAW,IAAI,SAAS,MAAM;AAC9B,UAAA,aAAa,WAAW,GAAG,SAAS,QAAQ,QAAQ,EAAE,CAAC,MAAM;AAE7D,UAAA,aAAa,CAAC,SAAe;AACjC,YAAM,OAAO,KAAK,OAAO,GAAG,KAAK,IAAI,MAAM;AACpC,aAAA,GAAG,UAAU,GAAG,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,GAAG;AAAA,IACpD;AAEA,UAAM,SAAS,OAAO,MAAY,eAA+C,CAAA,MAAO;AAChF,YAAA,UAAU,WAAW,IAAI;AACzB,YAAA,YAAY,IAAI,OAAO;AAAA,QAC3B,QAAQ;AAAA,QACR,QAAQ;AAAA,UACN,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,KAAK,UAAU,OAAO,KAAK,KAAK,QAAe,QAAQ;AAAA,UAC7D,KAAK,OAAO,OAAO
;AAAA,UACnB,aAAa,KAAK;AAAA,UAClB,GAAG;AAAA,QAAA;AAAA,MACL,CACD;AAEKC,YAAAA,UAAU,MAAM,UAAU,KAAK;AAEjC,UAAA,kBAAkBA,QAAO,QAAQ,GAAG;AACtC,aAAK,MAAM,UAAU,GAAG,OAAO,IAAI,OAAO,KAAKA,QAAO;AAAA,MAAA,OACjD;AAEA,aAAA,MAAM,WAAWA,QAAO,QAAQ;AAAA,MAAA;AAAA,IAEzC;AAEO,WAAA;AAAA,MACL,YAAY;AACH,eAAA,OAAO,OAAO,QAAQ;AAAA,MAC/B;AAAA,MAEA,MAAM,aAAa,MAAY,cAA6C;AAEtE,YAAA,CAAC,gBAAgB,KAAK,KAAK,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtD,iBAAA,EAAE,KAAK,KAAK,IAAI;AAAA,QAAA;AAEnB,cAAA,UAAU,WAAW,IAAI;AAE/B,cAAM,MAAM,MAAM;AAAA;AAAA,UAEhB;AAAA,UACA,IAAI,iBAAiB;AAAA,YACnB,QAAQ,OAAO,OAAO;AAAA,YACtB,KAAK;AAAA,YACL,GAAG;AAAA,UAAA,CACJ;AAAA,UACD;AAAA,YACE,WAAW,MAAM,KAAK,IAAI,CAAC,UAAU,kBAAkB,GAAG,MAAM;AAAA,UAAA;AAAA,QAEpE;AAEA,eAAO,EAAE,IAAI;AAAA,MACf;AAAA,MACA,aAAa,MAAY,eAAe,IAAI;AACnC,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAI;AAC7B,eAAA,OAAO,MAAM,YAAY;AAAA,MAClC;AAAA,MACA,OAAO,MAAY,eAAe,IAAwC;AAClE,cAAA,UAAU,IAAI,oBAAoB;AAAA,UACtC,QAAQ,OAAO,OAAO;AAAA,UACtB,KAAK,WAAW,IAAI;AAAA,UACpB,GAAG;AAAA,QAAA,CACJ;AACM,eAAA,SAAS,KAAK,OAAO;AAAA,MAAA;AAAA,IAEhC;AAAA,EAAA;AAEJ;"}
package/dist/utils.d.ts CHANGED
@@ -1,2 +1,5 @@
1
+ import type { AwsCredentialIdentity } from '@aws-sdk/types';
2
+ import type { InitOptions } from '.';
1
3
  export declare function isUrlFromBucket(fileUrl: string, bucketName: string, baseUrl?: string): boolean;
4
+ export declare const extractCredentials: (options: InitOptions) => AwsCredentialIdentity | null;
2
5
  //# sourceMappingURL=utils.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAOA,wBAAgB,eAAe,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,OAAO,SAAK,GAAG,OAAO,CAoB1F"}
1
+ {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,gBAAgB,CAAC;AAC5D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,GAAG,CAAC;AASrC,wBAAgB,eAAe,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,OAAO,SAAK,GAAG,OAAO,CAoB1F;AA4DD,eAAO,MAAM,kBAAkB,YAAa,WAAW,KAAG,qBAAqB,GAAG,IAQjF,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@strapi/provider-upload-aws-s3",
3
- "version": "0.0.0-next.f5b09a8e61e059f02784478e27c310c6290be088",
3
+ "version": "0.0.0-next.f698d55751345c4ca87477ef683475c1a68f310a",
4
4
  "description": "AWS S3 provider for strapi upload",
5
5
  "keywords": [
6
6
  "upload",
@@ -34,30 +34,32 @@
34
34
  "source": "./src/index.ts",
35
35
  "types": "./dist/index.d.ts",
36
36
  "files": [
37
- "./dist"
37
+ "dist/"
38
38
  ],
39
39
  "scripts": {
40
40
  "build": "pack-up build",
41
41
  "clean": "run -T rimraf ./dist",
42
42
  "lint": "run -T eslint .",
43
- "prepublishOnly": "yarn clean && yarn build",
44
43
  "test:unit": "run -T jest",
45
44
  "test:unit:watch": "run -T jest --watch",
46
45
  "watch": "pack-up watch"
47
46
  },
48
47
  "dependencies": {
49
- "aws-sdk": "2.1472.0",
48
+ "@aws-sdk/client-s3": "3.600.0",
49
+ "@aws-sdk/lib-storage": "3.433.0",
50
+ "@aws-sdk/s3-request-presigner": "3.433.0",
51
+ "@aws-sdk/types": "3.433.0",
50
52
  "lodash": "4.17.21"
51
53
  },
52
54
  "devDependencies": {
53
- "@strapi/pack-up": "0.0.0-next.f5b09a8e61e059f02784478e27c310c6290be088",
55
+ "@strapi/pack-up": "5.0.2",
54
56
  "@types/jest": "29.5.2",
55
- "eslint-config-custom": "0.0.0-next.f5b09a8e61e059f02784478e27c310c6290be088",
56
- "tsconfig": "0.0.0-next.f5b09a8e61e059f02784478e27c310c6290be088"
57
+ "eslint-config-custom": "0.0.0-next.f698d55751345c4ca87477ef683475c1a68f310a",
58
+ "tsconfig": "0.0.0-next.f698d55751345c4ca87477ef683475c1a68f310a"
57
59
  },
58
60
  "engines": {
59
- "node": ">=18.0.0 <=20.x.x",
61
+ "node": ">=18.0.0 <=22.x.x",
60
62
  "npm": ">=6.0.0"
61
63
  },
62
- "gitHead": "f5b09a8e61e059f02784478e27c310c6290be088"
64
+ "gitHead": "f698d55751345c4ca87477ef683475c1a68f310a"
63
65
  }
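Because init() warns when S3 options are passed at the root of providerOptions, a provider configuration targeting this version would nest them under s3Options, matching the InitOptions shape embedded above. A hedged sketch of such a Strapi upload config; the environment variable names and the minimal env typing are placeholders, not part of this package.

// config/plugins.ts — placeholder environment variable names throughout
export default ({ env }: { env: (key: string) => string }) => ({
  upload: {
    config: {
      provider: 'aws-s3',
      providerOptions: {
        baseUrl: env('CDN_URL'),         // optional: serve files from a CDN instead of signing
        rootPath: env('CDN_ROOT_PATH'),  // optional: key prefix inside the bucket
        s3Options: {
          credentials: {
            accessKeyId: env('AWS_ACCESS_KEY_ID'),
            secretAccessKey: env('AWS_ACCESS_SECRET'),
          },
          region: env('AWS_REGION'),
          params: {
            ACL: 'private',              // ObjectCannedACL; the provider defaults to 'public-read'
            signedUrlExpires: 15 * 60,   // seconds; expiry used when pre-signing GET URLs
            Bucket: env('AWS_BUCKET'),
          },
        },
      },
    },
  },
});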