node-automator 1.2.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/commands/ali_oss.js +125 -0
- package/commands/mgr.js +2 -0
- package/package.json +2 -1
- package/utils/func_tool.js +106 -0
- package/utils/request_tool.js +2 -1

package/commands/ali_oss.js
ADDED

@@ -0,0 +1,125 @@
+const {
+    get_file_list,
+    get_full_path,
+    format_bytes
+} = require("../utils/file_tool");
+const {
+    success,
+    getPrint,
+    info,
+    warn,
+    whisper
+} = require("../utils/log_tool");
+const {
+    BaseCommand
+} = require("./base");
+const path = require("path");
+const {
+    progress
+} = require("../utils/display_tool");
+const {
+    createWriteStream, readFileSync
+} = require("fs");
+const {
+    getCache,
+    setCache
+} = require("../utils/cache_tool");
+const crypto = require("crypto");
+const { queueAsync } = require('../utils/func_tool');
+
+
+class AliOssCommand extends BaseCommand {
+    async execute() {
+
+        let config = this.selfData.config;
+        let mode = this.selfData.mode;
+
+        return await new Promise((resolve, reject) => {
+            const OSS = require('ali-oss');
+            // Initialize the OSS client. Replace the parameters below with your own configuration.
+            const client = new OSS({
+                region: config.region, // Example: 'oss-cn-hangzhou'; the region where the bucket is located.
+                accessKeyId: config.access_key_id, // Make sure the OSS_ACCESS_KEY_ID environment variable is set.
+                accessKeySecret: config.access_key_secret, // Make sure the OSS_ACCESS_KEY_SECRET environment variable is set.
+                bucket: config.bucket, // Example: 'my-bucket-name'; the name of the bucket.
+            });
+
+            switch (mode) {
+                case "DOWNLOAD": {
+                    let src = this.selfData.src;
+                    let dst = get_full_path(this.selfData.dst);
+                    client.get(src, dst).then((result) => {
+                        resolve(result);
+                    }).catch((err) => {
+                        reject(err);
+                    });
+                    break;
+                }
+                case "UPLOAD_FILE":
+                case "UPLOAD": {
+                    let dst = this.selfData.dst;
+                    let src = get_full_path(this.selfData.src);
+                    info("");
+                    info(`Start uploading ${src} to ${dst}`);
+                    client.put(dst, src).then((result) => {
+                        resolve(result);
+                    })
+                    .catch((err) => {
+                        reject(err);
+                    });
+                    break;
+                }
+                case "UPLOAD_FILES": {
+                    let dst_folder = this.selfData.dst;
+                    let concurrency = this.selfData.options && this.selfData.options.concurrency || 10;
+                    let base = this.selfData.base;
+                    whisper(`Collecting files to upload in ${base}...`, undefined, true);
+                    let srcs = get_file_list(this.selfData.src, undefined, true);
+                    let retryTimes = 0;
+                    let maxRetryTimes = 3;
+                    function uploadFiles(srcs) {
+                        let tasks = srcs.map((src) => {
+                            let dst = path.join(dst_folder, path.relative(base, src)).replace(/\\/g, "/")
+                            return client.put.bind(client, dst, src);
+                        });
+                        queueAsync(tasks, concurrency, (done, total) => {
+                            progress(done, total, {
+                                desc: "Uploading...",
+                                depth: 0,
+                                color: "gray",
+                            });
+                        }).then((result) => {
+                            var retrySrcs = [];
+                            for (var i = 0; i < result.length; i++) {
+                                if (result[i].err) {
+                                    retrySrcs.push(srcs[i]);
+                                    warn(`Failed to upload ${srcs[i]}: ${result[i].err}`);
+                                }
+                            }
+                            if (retrySrcs.length > 0) {
+                                retryTimes++;
+                                if (retryTimes > maxRetryTimes) {
+                                    reject(`File upload failed after more than ${maxRetryTimes} retries.`);
+                                    return;
+                                }
+                                uploadFiles(retrySrcs);
+                            } else {
+                                resolve(true);
+                            }
+                        });
+                    }
+                    uploadFiles(srcs);
+                    break;
+                }
+                default: {
+                    resolve(false);
+                    break;
+                }
+            }
+        });
+    }
+}
+
+module.exports = {
+    AliOssCommand,
+};

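For orientation, AliOssCommand takes its inputs from this.selfData. All of the field names below appear in the code above; the values, and the exact way the YAML/JSON automation config populates selfData, are assumptions rather than part of this diff. A minimal sketch for the UPLOAD_FILES mode:

// Hypothetical selfData for AliOssCommand in UPLOAD_FILES mode; values are placeholders.
const selfData = {
    mode: "UPLOAD_FILES",          // the command also handles "DOWNLOAD" and "UPLOAD" / "UPLOAD_FILE"
    config: {                      // mapped onto the ali-oss client options (region, accessKeyId, accessKeySecret, bucket)
        region: "oss-cn-hangzhou",
        access_key_id: "<ACCESS_KEY_ID>",
        access_key_secret: "<ACCESS_KEY_SECRET>",
        bucket: "my-bucket-name",
    },
    src: "./dist",                 // local files, expanded via get_file_list
    base: "./dist",                // prefix stripped when building OSS object keys
    dst: "releases/assets",        // object key prefix in the bucket
    options: { concurrency: 10 },  // parallel uploads handed to queueAsync (defaults to 10)
};
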
package/commands/mgr.js
CHANGED

@@ -115,6 +115,7 @@ const { ImageCropCommand } = require('./image_crop');
 const { CursorUpCommand } = require('./cursor_up');
 const { RegisterSearchInstallLocationCommand } = require('./register_search_install_location');
 const { DeobfuscateCommand } = require('./deobfuscate');
+const { AliOssCommand } = require('./ali_oss');
 
 const globalData = {
     "executed_cfg": [], // configuration files that have already been executed

@@ -471,6 +472,7 @@ function init() {
     register("cursor_up", CursorUpCommand, false);
     register("register_search_install_location", RegisterSearchInstallLocationCommand, false);
     register("deobfuscate", DeobfuscateCommand, false);
+    register("ali_oss", AliOssCommand, false);
 }
 
 module.exports = {

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "node-automator",
-  "version": "1.2.0",
+  "version": "1.3.0",
   "description": "Execute automation with yaml configuration(compatible with json)",
   "main": "index.js",
   "repository": {

@@ -18,6 +18,7 @@
   "license": "ISC",
   "dependencies": {
     "adm-zip": "^0.5.1",
+    "ali-oss": "^6.20.0",
     "await-event-emitter": "^2.0.2",
     "change-case": "^4.1.2",
     "chardet": "^1.3.0",

package/utils/func_tool.js
ADDED

@@ -0,0 +1,106 @@
+function queue(fn, limit, trigger, flushTrigger, toggleTrigger) {
+    var argQueues = [];
+    var context = null;
+    var availableCount = limit;
+    var isWaiting = false;
+    var permitAll = false;
+
+    function runQueue() {
+        if (!argQueues.length) return;
+        if (availableCount > 0 || permitAll) {
+            var runNum = permitAll ? argQueues.length : Math.min(availableCount, argQueues.length);
+            var argsToRun = argQueues.splice(0, runNum);
+            argsToRun.forEach(function (item) {
+                fn.apply(context, item);
+            });
+            if (!permitAll) {
+                availableCount -= runNum;
+            }
+        }
+        if (!isWaiting) {
+            isWaiting = true;
+            trigger(reset);
+        }
+    }
+
+    function reset() {
+        isWaiting = false;
+        availableCount = limit;
+        runQueue();
+    }
+
+    function toggle() {
+        permitAll = !permitAll;
+        runQueue();
+    }
+    function flush() {
+        var argsToRun = argQueues.splice(0, argQueues.length);
+        argsToRun.forEach(function (item) {
+            fn.apply(context, item);
+        });
+    }
+    var queued = function () {
+        var args = arguments;
+        argQueues.push(args);
+        context = this;
+        runQueue();
+    };
+    flushTrigger && flushTrigger(flush);
+    toggleTrigger && toggleTrigger(toggle);
+    Object.defineProperty(queued, "status", {
+        get: function() {
+            return {
+                queuedCount: argQueues.length,
+                limit: limit,
+                availableCount: availableCount,
+                isWaiting: isWaiting,
+                permitAll: permitAll,
+            }
+        },
+    });
+    return queued;
+}
+
+function queueAsync(tasks, limit, onProgress) {
+    return new Promise(function (resolve) {
+        var result = [];
+        if (!tasks.length) resolve(result);
+        var index = 0;
+        var numDone = 0;
+        onProgress && onProgress(numDone, tasks.length);
+        function checkDone() {
+            onProgress && onProgress(numDone, tasks.length);
+            if (numDone == tasks.length) {
+                resolve(result);
+                return;
+            }
+            if (index < tasks.length) {
+                _run();
+            }
+        };
+        function _run() {
+            const task = tasks[index++];
+            task().then(function (val) {
+                numDone++;
+                result.push({
+                    val,
+                });
+                checkDone();
+            }).catch(function (err) {
+                numDone++;
+                result.push({
+                    err,
+                });
+                checkDone();
+            });
+        }
+        for (var i = 0; i < tasks.length && i < limit; i++) {
+            _run();
+        }
+    });
+}
+
+module.exports = {
+    queue,
+    queueAsync,
+};

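queueAsync runs an array of promise-returning task functions with at most limit of them in flight at once, reports progress as tasks settle, and resolves with one { val } or { err } entry per task. A minimal usage sketch, assuming only what the file above exports; fetchResource and the require path are placeholders:

// Run four asynchronous tasks with at most two in flight.
const { queueAsync } = require("./utils/func_tool"); // adjust the path to where func_tool.js lives

// Stand-in for any promise-returning unit of work.
function fetchResource(name) {
    return new Promise((resolve) => setTimeout(() => resolve(name.toUpperCase()), 100));
}

const names = ["a", "b", "c", "d"];
const tasks = names.map((n) => () => fetchResource(n)); // each task is () => Promise

queueAsync(tasks, 2, (done, total) => {
    console.log(`progress: ${done}/${total}`); // called once up front and once per settled task
}).then((results) => {
    // results holds { val } for fulfilled tasks and { err } for rejected ones, in completion order.
    console.log(results.map((r) => r.val)); // e.g. [ 'A', 'B', 'C', 'D' ]
});
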
package/utils/request_tool.js
CHANGED

@@ -39,13 +39,14 @@ async function doRequest(data) {
     let quiet = data.quiet;
     !quiet && info(useCache ? "[cache]" : `[${options.method}] ${url} `);
     let hasProgress = false;
+    let fileDst = get_full_path(data.dst || shareData.AUTOMATOR_SCRATCH + "/request_tool/" + cache_name, "FILE");
     if (useCache) {
         result = http_cache;
+        fs.writeFileSync(fileDst, http_cache);
     } else {
         let chunks = [];
         result = await new Promise(resolve => {
             let sizeTotal = 0;
-            let fileDst = get_full_path(data.dst || shareData.AUTOMATOR_SCRATCH + "/request_tool/" + cache_name, "FILE");
             let cacheDir = path.dirname(cache_path);
             if (!fs.existsSync(cacheDir)) {
                 fs.mkdirSync(cacheDir, {