picgo-plugin-s3 1.1.0 → 1.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -0
- package/dist/index.js +29 -3
- package/dist/uploader.d.ts +2 -2
- package/dist/uploader.js +16 -20
- package/dist/utils.d.ts +6 -0
- package/dist/utils.js +27 -2
- package/package.json +6 -5
package/README.md
CHANGED
|
@@ -29,6 +29,8 @@ picgo set uploader aws-s3
|
|
|
29
29
|
| `endpoint` | 指定自定义终端节点 | `s3.us-west-2.amazonaws.com` |
|
|
30
30
|
| `region` | 指定执行服务请求的区域 | `us-west-1` |
|
|
31
31
|
| `pathStyleAccess` | 是否启用 S3 Path style | 默认为 `false`,使用 minio 请设置为 `true` |
|
|
32
|
+
| `rejectUnauthorized` | 是否拒绝无效TLS证书连接 | 默认为 `true`,如上传失败日志显示证书问题可设置为`false`|
|
|
33
|
+
| `acl` | 访问控制列表,上传资源的访问策略 | 默认为 `public-read` |
|
|
32
34
|
|
|
33
35
|
**上传路径支持 payload:**
|
|
34
36
|
|
|
@@ -59,6 +61,16 @@ picgo set uploader aws-s3
|
|
|
59
61
|
|
|
60
62
|
如果 PicGo 像以上配置,执行上传:`picgo upload sample.png`,则最终得到图片地址为:`https://img.example.com/2021/4aa4f41e38817e5fd38ac870f40dbc70.jpg`
|
|
61
63
|
|
|
64
|
+
## 发布 Publish
|
|
65
|
+
|
|
66
|
+
With the following command, a versioned commit which modifies the `version` of `package.json` would be generated and pushed to the origin. GitHub Actions will automatically compile this package and publish it to NPM.
|
|
67
|
+
|
|
68
|
+
```sh
|
|
69
|
+
npm run patch
|
|
70
|
+
npm run minor
|
|
71
|
+
npm run major
|
|
72
|
+
```
|
|
73
|
+
|
|
62
74
|
## 贡献 Contributing
|
|
63
75
|
|
|
64
76
|
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
|
package/dist/index.js
CHANGED
|
@@ -11,7 +11,9 @@ module.exports = (ctx) => {
|
|
|
11
11
|
secretAccessKey: '',
|
|
12
12
|
bucketName: '',
|
|
13
13
|
uploadPath: '{year}/{month}/{md5}.{extName}',
|
|
14
|
-
pathStyleAccess: false
|
|
14
|
+
pathStyleAccess: false,
|
|
15
|
+
rejectUnauthorized: true,
|
|
16
|
+
acl: 'public-read'
|
|
15
17
|
};
|
|
16
18
|
let userConfig = ctx.getConfig('picBed.aws-s3');
|
|
17
19
|
userConfig = Object.assign(Object.assign({}, defaultConfig), (userConfig || {}));
|
|
@@ -46,6 +48,14 @@ module.exports = (ctx) => {
|
|
|
46
48
|
required: true,
|
|
47
49
|
alias: '文件路径'
|
|
48
50
|
},
|
|
51
|
+
{
|
|
52
|
+
name: 'acl',
|
|
53
|
+
type: 'input',
|
|
54
|
+
default: userConfig.acl,
|
|
55
|
+
message: '文件访问权限',
|
|
56
|
+
required: true,
|
|
57
|
+
alias: '权限'
|
|
58
|
+
},
|
|
49
59
|
{
|
|
50
60
|
name: 'region',
|
|
51
61
|
type: 'input',
|
|
@@ -75,6 +85,22 @@ module.exports = (ctx) => {
|
|
|
75
85
|
message: 'enable path-style-access or not',
|
|
76
86
|
required: false,
|
|
77
87
|
alias: 'PathStyleAccess'
|
|
88
|
+
},
|
|
89
|
+
{
|
|
90
|
+
name: 'rejectUnauthorized',
|
|
91
|
+
type: 'confirm',
|
|
92
|
+
default: userConfig.rejectUnauthorized || true,
|
|
93
|
+
message: '是否拒绝无效TLS证书连接',
|
|
94
|
+
required: false,
|
|
95
|
+
alias: 'rejectUnauthorized'
|
|
96
|
+
},
|
|
97
|
+
{
|
|
98
|
+
name: 'acl',
|
|
99
|
+
type: 'input',
|
|
100
|
+
default: userConfig.acl || 'public-read',
|
|
101
|
+
message: '上传资源的访问策略',
|
|
102
|
+
required: false,
|
|
103
|
+
alias: 'ACL 访问控制列表'
|
|
78
104
|
}
|
|
79
105
|
];
|
|
80
106
|
};
|
|
@@ -86,9 +112,9 @@ module.exports = (ctx) => {
|
|
|
86
112
|
if (userConfig.urlPrefix) {
|
|
87
113
|
userConfig.urlPrefix = userConfig.urlPrefix.replace(/\/?$/, '');
|
|
88
114
|
}
|
|
89
|
-
const client = uploader_1.default.createS3Client(userConfig.accessKeyID, userConfig.secretAccessKey, userConfig.region, userConfig.endpoint, userConfig.pathStyleAccess);
|
|
115
|
+
const client = uploader_1.default.createS3Client(userConfig.accessKeyID, userConfig.secretAccessKey, userConfig.region, userConfig.endpoint, userConfig.pathStyleAccess, userConfig.rejectUnauthorized);
|
|
90
116
|
const output = ctx.output;
|
|
91
|
-
const tasks = output.map((item, idx) => uploader_1.default.createUploadTask(client, userConfig.bucketName, utils_1.formatPath(item, userConfig.uploadPath), item, idx));
|
|
117
|
+
const tasks = output.map((item, idx) => uploader_1.default.createUploadTask(client, userConfig.bucketName, utils_1.formatPath(item, userConfig.uploadPath), item, idx, userConfig.acl));
|
|
92
118
|
try {
|
|
93
119
|
const results = await Promise.all(tasks);
|
|
94
120
|
for (let result of results) {
|
package/dist/uploader.d.ts
CHANGED
|
@@ -5,8 +5,8 @@ export interface IUploadResult {
|
|
|
5
5
|
imgURL: string;
|
|
6
6
|
index: number;
|
|
7
7
|
}
|
|
8
|
-
declare function createS3Client(accessKeyID: string, secretAccessKey: string, region: string, endpoint: string, pathStyleAccess: boolean): AWS.S3;
|
|
9
|
-
declare function createUploadTask(s3: AWS.S3, bucketName: string, path: string, item: IImgInfo, index: number): Promise<IUploadResult>;
|
|
8
|
+
declare function createS3Client(accessKeyID: string, secretAccessKey: string, region: string, endpoint: string, pathStyleAccess: boolean, rejectUnauthorized: boolean): AWS.S3;
|
|
9
|
+
declare function createUploadTask(s3: AWS.S3, bucketName: string, path: string, item: IImgInfo, index: number, acl: string): Promise<IUploadResult>;
|
|
10
10
|
declare const _default: {
|
|
11
11
|
createS3Client: typeof createS3Client;
|
|
12
12
|
createUploadTask: typeof createUploadTask;
|
package/dist/uploader.js
CHANGED
|
@@ -4,41 +4,37 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
6
|
const aws_sdk_1 = __importDefault(require("aws-sdk"));
|
|
7
|
-
const
|
|
8
|
-
|
|
7
|
+
const https_1 = __importDefault(require("https"));
|
|
8
|
+
const utils_1 = require("./utils");
|
|
9
|
+
function createS3Client(accessKeyID, secretAccessKey, region, endpoint, pathStyleAccess, rejectUnauthorized) {
|
|
9
10
|
const s3 = new aws_sdk_1.default.S3({
|
|
10
11
|
region,
|
|
11
12
|
endpoint,
|
|
12
13
|
accessKeyId: accessKeyID,
|
|
13
14
|
secretAccessKey: secretAccessKey,
|
|
14
|
-
s3ForcePathStyle: pathStyleAccess
|
|
15
|
+
s3ForcePathStyle: pathStyleAccess,
|
|
16
|
+
httpOptions: {
|
|
17
|
+
agent: new https_1.default.Agent({
|
|
18
|
+
rejectUnauthorized: rejectUnauthorized,
|
|
19
|
+
})
|
|
20
|
+
}
|
|
15
21
|
});
|
|
16
22
|
return s3;
|
|
17
23
|
}
|
|
18
|
-
function createUploadTask(s3, bucketName, path, item, index) {
|
|
24
|
+
function createUploadTask(s3, bucketName, path, item, index, acl) {
|
|
19
25
|
return new Promise(async (resolve, reject) => {
|
|
20
26
|
if (!item.buffer && !item.base64Image) {
|
|
21
27
|
reject(new Error('undefined image'));
|
|
22
28
|
}
|
|
29
|
+
const { body, contentType, contentEncoding } = await utils_1.extractInfo(item);
|
|
23
30
|
const opts = {
|
|
24
31
|
Key: path,
|
|
25
|
-
Bucket: bucketName
|
|
32
|
+
Bucket: bucketName,
|
|
33
|
+
ACL: acl,
|
|
34
|
+
Body: body,
|
|
35
|
+
ContentType: contentType,
|
|
36
|
+
ContentEncoding: contentEncoding,
|
|
26
37
|
};
|
|
27
|
-
if (item.buffer) {
|
|
28
|
-
opts.Body = item.buffer;
|
|
29
|
-
// 不知道是否会发生未获取到 mime 类型就上传文件的情况
|
|
30
|
-
const fileType = await file_type_1.default.fromBuffer(item.buffer);
|
|
31
|
-
opts.ContentType = fileType.mime;
|
|
32
|
-
}
|
|
33
|
-
else {
|
|
34
|
-
let data = item.base64Image;
|
|
35
|
-
const format = data.substring(data.indexOf('data:') + 5, data.indexOf(';base64'));
|
|
36
|
-
data = data.replace(/^data:image\/\w+;base64,/, '');
|
|
37
|
-
const buf = Buffer.from(data, 'base64');
|
|
38
|
-
opts.Body = buf;
|
|
39
|
-
opts.ContentEncoding = 'base64';
|
|
40
|
-
opts.ContentType = format;
|
|
41
|
-
}
|
|
42
38
|
s3.upload(opts)
|
|
43
39
|
.promise()
|
|
44
40
|
.then((result) => {
|
package/dist/utils.d.ts
CHANGED
|
@@ -1,2 +1,8 @@
|
|
|
1
|
+
/// <reference types="node" />
|
|
1
2
|
import { IImgInfo } from 'picgo/dist/src/types';
|
|
2
3
|
export declare function formatPath(info: IImgInfo, format?: string): string;
|
|
4
|
+
export declare function extractInfo(info: IImgInfo): Promise<{
|
|
5
|
+
body?: Buffer;
|
|
6
|
+
contentType?: string;
|
|
7
|
+
contentEncoding?: string;
|
|
8
|
+
}>;
|
package/dist/utils.js
CHANGED
|
@@ -3,8 +3,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
3
3
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
exports.formatPath = void 0;
|
|
6
|
+
exports.extractInfo = exports.formatPath = void 0;
|
|
7
7
|
const crypto_1 = __importDefault(require("crypto"));
|
|
8
|
+
const file_type_1 = __importDefault(require("file-type"));
|
|
9
|
+
const mime_1 = __importDefault(require("mime"));
|
|
8
10
|
class FileNameGenerator {
|
|
9
11
|
constructor(info) {
|
|
10
12
|
this.date = new Date();
|
|
@@ -44,7 +46,7 @@ class FileNameGenerator {
|
|
|
44
46
|
imgBuffer() {
|
|
45
47
|
return this.info.base64Image
|
|
46
48
|
? this.info.base64Image
|
|
47
|
-
: this.info.buffer
|
|
49
|
+
: this.info.buffer;
|
|
48
50
|
}
|
|
49
51
|
}
|
|
50
52
|
FileNameGenerator.fields = [
|
|
@@ -71,3 +73,26 @@ function formatPath(info, format) {
|
|
|
71
73
|
return formatPath;
|
|
72
74
|
}
|
|
73
75
|
exports.formatPath = formatPath;
|
|
76
|
+
async function extractInfo(info) {
|
|
77
|
+
var _a;
|
|
78
|
+
let result = {};
|
|
79
|
+
if (info.base64Image) {
|
|
80
|
+
const body = info.base64Image.replace(/^data:[/\w]+;base64,/, '');
|
|
81
|
+
result.contentType = (_a = info.base64Image.match(/[^:]\w+\/[\w-+\d.]+(?=;|,)/)) === null || _a === void 0 ? void 0 : _a[0];
|
|
82
|
+
result.body = Buffer.from(body, 'base64');
|
|
83
|
+
result.contentEncoding = 'base64';
|
|
84
|
+
}
|
|
85
|
+
else {
|
|
86
|
+
if (info.extname) {
|
|
87
|
+
result.contentType = mime_1.default.getType(info.extname);
|
|
88
|
+
}
|
|
89
|
+
result.body = info.buffer;
|
|
90
|
+
}
|
|
91
|
+
// fallback to detect from buffer
|
|
92
|
+
if (!result.contentType) {
|
|
93
|
+
const fileType = await file_type_1.default.fromBuffer(result.body);
|
|
94
|
+
result.contentType = fileType === null || fileType === void 0 ? void 0 : fileType.mime;
|
|
95
|
+
}
|
|
96
|
+
return result;
|
|
97
|
+
}
|
|
98
|
+
exports.extractInfo = extractInfo;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "picgo-plugin-s3",
|
|
3
|
-
"version": "1.1.
|
|
3
|
+
"version": "1.1.4",
|
|
4
4
|
"description": "picgo amazon s3 uploader",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"publishConfig": {
|
|
@@ -12,9 +12,9 @@
|
|
|
12
12
|
"build": "tsc -p .",
|
|
13
13
|
"dev": "tsc -w -p .",
|
|
14
14
|
"pub": "rm -rf dist/* && npm build && npm publish",
|
|
15
|
-
"patch": "npm version patch && git push origin
|
|
16
|
-
"minor": "npm version minor && git push origin
|
|
17
|
-
"major": "npm version major && git push origin
|
|
15
|
+
"patch": "npm version patch -m 'bump version into v%s' && git push origin main && git push origin --tags",
|
|
16
|
+
"minor": "npm version minor -m 'bump version into v%s' && git push origin main && git push origin --tags",
|
|
17
|
+
"major": "npm version major -m 'bump version into v%s' && git push origin main && git push origin --tags"
|
|
18
18
|
},
|
|
19
19
|
"keywords": [
|
|
20
20
|
"amazon-s3",
|
|
@@ -41,6 +41,7 @@
|
|
|
41
41
|
},
|
|
42
42
|
"dependencies": {
|
|
43
43
|
"aws-sdk": "^2.839.0",
|
|
44
|
-
"file-type": "^16.2.0"
|
|
44
|
+
"file-type": "^16.2.0",
|
|
45
|
+
"mime": "^2.5.2"
|
|
45
46
|
}
|
|
46
47
|
}
|