picgo-plugin-s3 1.2.4 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -29,10 +29,9 @@ picgo set uploader aws-s3
29
29
  | `endpoint` | 指定自定义终端节点 | `s3.us-west-2.amazonaws.com` |
30
30
  | `proxy` | 代理地址 | 支持http代理,例如 `http://127.0.0.1:1080` |
31
31
  | `region` | 指定执行服务请求的区域 | `us-west-1` |
32
- | `pathStyleAccess` | 是否启用 S3 Path style | 默认为 `false`,使用 minio 请设置为 `true` |
32
+ | `pathStyleAccess` | 是否启用 S3 Path style | 默认为 `false`,使用 minio 请设置为 `true` (e.g., https://s3.amazonaws.com/<bucketName>/<key> instead of https://<bucketName>.s3.amazonaws.com/<key>) |
33
33
  | `rejectUnauthorized` | 是否拒绝无效 TLS 证书连接 | 默认为 `true`,如上传失败日志显示证书问题可设置为`false` |
34
- | `bucketEndpoint` | 提供的 Endpoint 是否针对单个存储桶(如果它针对根 API 端点,则为 false) | 默认为 `false` |
35
- | `acl` | 访问控制列表,上传资源的访问策略 | 默认为 `public-read` |
34
+ | `acl` | 访问控制列表,上传资源的访问策略 | 默认为 `public-read`, AWS 可选 `"private"|"public-read"|"public-read-write"|"authenticated-read"|"aws-exec-read"|"bucket-owner-read"|"bucket-owner-full-control"` |
36
35
 
37
36
  **上传路径支持 payload:**
38
37
 
package/dist/config.d.ts CHANGED
@@ -9,6 +9,5 @@ export interface IS3UserConfig {
9
9
  urlPrefix?: string;
10
10
  pathStyleAccess?: boolean;
11
11
  rejectUnauthorized?: boolean;
12
- bucketEndpoint?: boolean;
13
12
  acl?: string;
14
13
  }
package/dist/index.js CHANGED
@@ -13,7 +13,6 @@ module.exports = (ctx) => {
13
13
  uploadPath: "{year}/{month}/{md5}.{extName}",
14
14
  pathStyleAccess: false,
15
15
  rejectUnauthorized: true,
16
- bucketEndpoint: false,
17
16
  acl: "public-read",
18
17
  };
19
18
  let userConfig = ctx.getConfig("picBed.aws-s3");
@@ -95,14 +94,6 @@ module.exports = (ctx) => {
95
94
  required: false,
96
95
  alias: "rejectUnauthorized",
97
96
  },
98
- {
99
- name: "bucketEndpoint",
100
- type: "confirm",
101
- default: userConfig.bucketEndpoint || false,
102
- message: "提供的Endpoint是否针对单个存储桶(如果它针对根 API 端点,则为 false)",
103
- required: false,
104
- alias: "bucketEndpoint",
105
- },
106
97
  {
107
98
  name: "acl",
108
99
  type: "input",
@@ -123,32 +114,37 @@ module.exports = (ctx) => {
123
114
  }
124
115
  const client = uploader_1.default.createS3Client(userConfig);
125
116
  const output = ctx.output;
126
- const tasks = output.map((item, idx) => uploader_1.default.createUploadTask(client, userConfig.bucketName, (0, utils_1.formatPath)(item, userConfig.uploadPath), item, idx, userConfig.acl));
117
+ const tasks = output.map((item, idx) => uploader_1.default.createUploadTask({
118
+ client,
119
+ index: idx,
120
+ bucketName: userConfig.bucketName,
121
+ path: (0, utils_1.formatPath)(item, userConfig.uploadPath),
122
+ item: item,
123
+ acl: userConfig.acl,
124
+ urlPrefix: userConfig.urlPrefix,
125
+ }));
126
+ let results;
127
127
  try {
128
- const results = await Promise.all(tasks);
129
- for (const result of results) {
130
- const { index, url, imgURL } = result;
131
- delete output[index].buffer;
132
- delete output[index].base64Image;
133
- output[index].url = url;
134
- output[index].imgUrl = url;
135
- if (userConfig.urlPrefix) {
136
- output[index].url = `${userConfig.urlPrefix}/${imgURL}`;
137
- output[index].imgUrl = `${userConfig.urlPrefix}/${imgURL}`;
138
- }
139
- }
140
- return ctx;
128
+ results = await Promise.all(tasks);
141
129
  }
142
130
  catch (err) {
143
- ctx.log.error("上传到 Amazon S3 发生错误,请检查配置是否正确");
131
+ ctx.log.error("上传到 S3 存储发生错误,请检查网络连接和配置是否正确");
144
132
  ctx.log.error(err);
145
133
  ctx.emit("notification", {
146
- title: "Amazon S3 上传错误",
134
+ title: "S3 存储上传错误",
147
135
  body: "请检查配置是否正确",
148
136
  text: "",
149
137
  });
150
138
  throw err;
151
139
  }
140
+ for (const result of results) {
141
+ const { index, url, imgURL } = result;
142
+ delete output[index].buffer;
143
+ delete output[index].base64Image;
144
+ output[index].imgUrl = imgURL;
145
+ output[index].url = url;
146
+ }
147
+ return ctx;
152
148
  };
153
149
  const register = () => {
154
150
  ctx.helper.uploader.register("aws-s3", {
@@ -1,15 +1,29 @@
1
- import { S3 } from "aws-sdk";
1
+ import { S3Client } from "@aws-sdk/client-s3";
2
2
  import { IImgInfo } from "picgo";
3
3
  import { IS3UserConfig } from "./config";
4
4
  export interface IUploadResult {
5
+ index: number;
6
+ key: string;
5
7
  url: string;
6
8
  imgURL: string;
9
+ versionId?: string;
10
+ eTag?: string;
11
+ }
12
+ declare function createS3Client(opts: IS3UserConfig): S3Client;
13
+ interface createUploadTaskOpts {
14
+ client: S3Client;
15
+ bucketName: string;
16
+ path: string;
17
+ item: IImgInfo;
7
18
  index: number;
19
+ acl: string;
20
+ urlPrefix?: string;
8
21
  }
9
- declare function createS3Client(opts: IS3UserConfig): S3;
10
- declare function createUploadTask(s3: S3, bucketName: string, path: string, item: IImgInfo, index: number, acl: string): Promise<IUploadResult>;
22
+ declare function createUploadTask(opts: createUploadTaskOpts): Promise<IUploadResult>;
23
+ declare function getFileURL(opts: createUploadTaskOpts, eTag: string, versionId: string): Promise<string>;
11
24
  declare const _default: {
12
25
  createS3Client: typeof createS3Client;
13
26
  createUploadTask: typeof createUploadTask;
27
+ getFileURL: typeof getFileURL;
14
28
  };
15
29
  export default _default;
package/dist/uploader.js CHANGED
@@ -3,7 +3,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
3
3
  return (mod && mod.__esModule) ? mod : { "default": mod };
4
4
  };
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
- const aws_sdk_1 = require("aws-sdk");
6
+ const client_s3_1 = require("@aws-sdk/client-s3");
7
+ const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
8
+ const node_http_handler_1 = require("@aws-sdk/node-http-handler");
7
9
  const utils_1 = require("./utils");
8
10
  const url_1 = __importDefault(require("url"));
9
11
  function createS3Client(opts) {
@@ -15,52 +17,92 @@ function createS3Client(opts) {
15
17
  catch (_a) {
16
18
  // eslint-disable-next-line no-empty
17
19
  }
18
- const s3 = new aws_sdk_1.S3({
19
- region: opts.region,
20
+ const httpHandlerOpts = {};
21
+ if (sslEnabled) {
22
+ httpHandlerOpts.httpsAgent = ((0, utils_1.getProxyAgent)(opts.proxy, true, opts.rejectUnauthorized));
23
+ }
24
+ else {
25
+ httpHandlerOpts.httpAgent = ((0, utils_1.getProxyAgent)(opts.proxy, false, opts.rejectUnauthorized));
26
+ }
27
+ const clientOptions = {
28
+ region: opts.region || "auto",
20
29
  endpoint: opts.endpoint,
21
- accessKeyId: opts.accessKeyID,
22
- secretAccessKey: opts.secretAccessKey,
23
- s3ForcePathStyle: opts.pathStyleAccess,
24
- s3BucketEndpoint: opts.bucketEndpoint,
25
- sslEnabled: sslEnabled,
26
- httpOptions: {
27
- agent: (0, utils_1.getProxyAgent)(opts.proxy, sslEnabled, opts.rejectUnauthorized),
30
+ credentials: {
31
+ accessKeyId: opts.accessKeyID,
32
+ secretAccessKey: opts.secretAccessKey,
28
33
  },
29
- });
30
- return s3;
34
+ tls: sslEnabled,
35
+ forcePathStyle: opts.pathStyleAccess,
36
+ requestHandler: new node_http_handler_1.NodeHttpHandler(httpHandlerOpts),
37
+ };
38
+ const client = new client_s3_1.S3Client(clientOptions);
39
+ return client;
31
40
  }
32
- function createUploadTask(s3, bucketName, path, item, index, acl) {
33
- return new Promise((resolve, reject) => {
34
- if (!item.buffer && !item.base64Image) {
35
- reject(new Error("undefined image"));
36
- }
37
- (0, utils_1.extractInfo)(item)
38
- .then(({ body, contentType, contentEncoding }) => {
39
- const opts = {
40
- Key: path,
41
- Bucket: bucketName,
42
- ACL: acl,
43
- Body: body,
44
- ContentType: contentType,
45
- ContentEncoding: contentEncoding,
46
- };
47
- s3.upload(opts)
48
- .promise()
49
- .then((result) => {
50
- resolve({
51
- url: result.Location,
52
- imgURL: result.Key,
53
- index,
54
- });
55
- })
56
- .catch((err) => reject(err));
57
- })
58
- .catch((err) => {
59
- reject(err);
60
- });
41
+ async function createUploadTask(opts) {
42
+ if (!opts.item.buffer && !opts.item.base64Image) {
43
+ return Promise.reject(new Error("undefined image"));
44
+ }
45
+ let body;
46
+ let contentType;
47
+ let contentEncoding;
48
+ try {
49
+ ;
50
+ ({ body, contentType, contentEncoding } = await (0, utils_1.extractInfo)(opts.item));
51
+ }
52
+ catch (err) {
53
+ return Promise.reject(err);
54
+ }
55
+ const command = new client_s3_1.PutObjectCommand({
56
+ Bucket: opts.bucketName,
57
+ Key: opts.path,
58
+ ACL: opts.acl,
59
+ Body: body,
60
+ ContentType: contentType,
61
+ ContentEncoding: contentEncoding,
61
62
  });
63
+ let output;
64
+ try {
65
+ output = await opts.client.send(command);
66
+ }
67
+ catch (err) {
68
+ return Promise.reject(err);
69
+ }
70
+ let url;
71
+ if (!opts.urlPrefix) {
72
+ try {
73
+ url = await getFileURL(opts, output.ETag, output.VersionId);
74
+ }
75
+ catch (err) {
76
+ return Promise.reject(err);
77
+ }
78
+ }
79
+ else {
80
+ url = `${opts.urlPrefix}/${opts.path}`;
81
+ }
82
+ return {
83
+ index: opts.index,
84
+ key: opts.path,
85
+ url: url,
86
+ imgURL: url,
87
+ versionId: output.VersionId,
88
+ eTag: output.ETag,
89
+ };
90
+ }
91
+ async function getFileURL(opts, eTag, versionId) {
92
+ try {
93
+ return await (0, s3_request_presigner_1.getSignedUrl)(opts.client, new client_s3_1.GetObjectCommand({
94
+ Bucket: opts.bucketName,
95
+ Key: opts.path,
96
+ IfMatch: eTag,
97
+ VersionId: versionId,
98
+ }), { expiresIn: 3600 });
99
+ }
100
+ catch (err) {
101
+ return Promise.reject(err);
102
+ }
62
103
  }
63
104
  exports.default = {
64
105
  createS3Client,
65
106
  createUploadTask,
107
+ getFileURL,
66
108
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "picgo-plugin-s3",
3
- "version": "1.2.4",
3
+ "version": "1.3.0",
4
4
  "description": "picgo amazon s3 uploader",
5
5
  "main": "dist/index.js",
6
6
  "publishConfig": {
@@ -39,7 +39,9 @@
39
39
  "typescript": "^4.8.4"
40
40
  },
41
41
  "dependencies": {
42
- "aws-sdk": "^2.839.0",
42
+ "@aws-sdk/client-s3": "^3.304.0",
43
+ "@aws-sdk/lib-storage": "^3.304.0",
44
+ "@aws-sdk/s3-request-presigner": "^3.305.0",
43
45
  "file-type": "^16.2.0",
44
46
  "hpagent": "^1.2.0",
45
47
  "mime": "^2.5.2"