@zwa73/dev-utils 1.0.94 → 1.0.95
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/data/template/base/tsconfig.json +0 -2
- package/data/template/cjs/tsconfig.json +0 -2
- package/dist/Command/Archive.d.ts +3 -0
- package/dist/Command/Archive.js +267 -0
- package/dist/Command/CheckToken.d.ts +1 -1
- package/dist/Command/CheckToken.js +5 -5
- package/dist/Command/MapPath.js +1 -1
- package/dist/Command/Release.js +1 -1
- package/dist/Command/Route.js +2 -0
- package/dist/Command/ScanDups.js +5 -5
- package/package.json +1 -1
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.CmdArchive = void 0;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const utils_1 = require("@zwa73/utils");
|
|
9
|
+
const pathe_1 = __importDefault(require("pathe"));
|
|
10
|
+
/** Shared execution options passed to every UtilFunc.exec call in this module.
 * NOTE(review): `nodeModules: 'None'` presumably tells the exec helper not to
 * involve node_modules (e.g. no local-bin PATH injection) — confirm against
 * the @zwa73/utils exec documentation. */
const execopt = { nodeModules: 'None' };
|
|
12
|
+
/** Archive command definition.
 * Registers the `Archive` command (aliases: `compress`, `archive`) on the given
 * commander program. Archives/compresses files or directories; supports a glob
 * batch mode (`-g`) and a single-target mode where multiple inputs are packed
 * together under their common parent directory's name.
 * All inputs must share the same parent directory.
 *
 * Path-info objects used throughout have the shape:
 *   { list: string[], abs: string, rel: string, base: string, absdir: string }
 * where `list` holds the basenames to pack, `abs`/`rel` the absolute/cwd-relative
 * target path, `base` its basename, and `absdir` the working directory for
 * spawned commands.
 *
 * NOTE(review): commander conventionally maps `--no-test` to a negated `opt.test`
 * (default true) rather than `opt.noTest`; the compress handlers below read
 * `noTest` — verify the flag actually reaches them as intended.
 */
const CmdArchive = (program) => program
    .command("Archive")
    .alias("compress")
    .alias("archive")
    .description("归档并压缩文件/目录")
    .argument("<input...>", "文件或目录")
    .option("-g, --glob", "批处理模式", false)
    .option("-d, --dir", "glob 模式下是否包含目录", false)
    .option("-k, --keep", "是否保留tar打包文件", false)
    .option("--no-test", "不测试文件", false)
    .option("-p, --parallel <num>", "并行数,默认 1", parseInt, 1)
    .option("-f, --format <fmt>", `归档格式: ${Object.keys(compressFuncTable).map(v => ` ${v} `).join('|')}`, "zstd")
    .action(async (input, opt) => {
    // Reject formats that are not keys of the compress table.
    if (!Object.keys(compressFuncTable).includes(opt.format)) {
        utils_1.SLogger.warn(`${opt.format} 不是有效的格式`);
        return;
    }
    // parseInt may yield NaN; also reject non-positive parallelism.
    if (opt.parallel == null || opt.parallel < 1 || isNaN(opt.parallel)) {
        utils_1.SLogger.warn(`${opt.parallel} 不是有效的并行数`);
        return;
    }
    const cwd = process.cwd();
    // Every input must resolve to the same parent directory.
    const dirlist = input.map(v => pathe_1.default.dirname(pathe_1.default.resolve(cwd, v)));
    if (dirlist.some(v => v != dirlist[0])) {
        utils_1.SLogger.warn(`输入路径必须在同一目录下`);
        return;
    }
    // Batch (glob) mode: expand the patterns and archive each match separately,
    // up to `parallel` at a time.
    if (opt.glob) {
        const list = (await utils_1.UtilFT.fileSearchGlob(cwd, input, {
            // `-d` includes directories; otherwise leave the search default.
            nodir: opt.dir ? false : undefined
        })).map(fp => ({
            list: [pathe_1.default.basename(fp)],
            rel: pathe_1.default.relative(cwd, fp),
            abs: fp,
            base: pathe_1.default.basename(fp),
            absdir: pathe_1.default.dirname(fp),
        }));
        if (!list.length)
            return void utils_1.SLogger.warn(`未找到任何匹配项: ${input}`);
        await utils_1.Stream.from(list, opt.parallel).map(async (input) => {
            return compressOne({ input, ...opt }, opt.format);
        }).apply();
        return;
    }
    // Name used when packing multiple inputs: the basename of their shared parent.
    const dirfilename = pathe_1.default.basename(pathe_1.default.dirname(input[0]));
    // Single-target mode: one archive, named after the file itself (one input)
    // or after the parent directory (multiple inputs).
    // NOTE(review): with multiple inputs the synthetic `abs` path
    // (parent-dir joined with `dirfilename`) need not exist on disk.
    await compressOne({ input: {
            list: input.map(fp => pathe_1.default.basename(fp)),
            abs: input.length > 1
                ? pathe_1.default.join(pathe_1.default.resolve(cwd, pathe_1.default.dirname(input[0])), dirfilename)
                : pathe_1.default.resolve(cwd, input[0]),
            rel: input.length > 1
                ? pathe_1.default.join(pathe_1.default.relative(cwd, pathe_1.default.dirname(input[0])), dirfilename)
                : pathe_1.default.relative(cwd, input[0]),
            base: input.length > 1
                ? pathe_1.default.basename(pathe_1.default.dirname(input[0]))
                : pathe_1.default.basename(input[0]),
            absdir: pathe_1.default.dirname(pathe_1.default.resolve(cwd, input[0])),
        }, ...opt }, opt.format);
});
exports.CmdArchive = CmdArchive;
|
|
75
|
+
/** Format a byte count as a human-readable size string (Windows style).
 * @param bytes - non-negative byte count
 * @returns e.g. "0 Bytes", "512 Bytes", "1.50 KB"; "Bytes" keeps no decimals,
 *          all larger units keep two.
 */
const formatSize = (bytes) => {
    if (bytes === 0)
        return "0 Bytes";
    const units = ["Bytes", "KB", "MB", "GB", "TB"];
    // Clamp the unit index: without the clamp, sizes >= 1024 TB indexed past the
    // table and rendered as "... undefined".
    const i = Math.min(Math.max(Math.floor(Math.log(bytes) / Math.log(1024)), 0), units.length - 1);
    const size = bytes / Math.pow(1024, i);
    // Only "Bytes" is shown without decimals; other units keep two.
    return `${i === 0 ? size : size.toFixed(2)} ${units[i]}`;
};
|
|
85
|
+
/** Log a compression report: source size, target size and the resulting ratio,
 * at info level. If either path cannot be stat'ed, a warning is logged instead
 * of throwing.
 * @param sourcePath - path of the original (uncompressed) file
 * @param targetPath - path of the produced archive
 */
const logCompressionRatio = async (sourcePath, targetPath) => {
    try {
        // Stat source first, then target, so a missing source reports its own error.
        const srcInfo = await fs_1.default.promises.stat(sourcePath);
        const tgtInfo = await fs_1.default.promises.stat(targetPath);
        const percent = ((tgtInfo.size / srcInfo.size) * 100).toFixed(2);
        utils_1.SLogger.info(`📊 压缩报告: ${formatSize(srcInfo.size)} -> ${formatSize(tgtInfo.size)} (压缩率: ${percent}%)`);
    }
    catch (e) {
        utils_1.SLogger.warn(`无法计算压缩率: ${e}`);
    }
};
|
|
99
|
+
/** Check whether the target path already exists on disk; warns (and returns
 * true) when it does, so callers can skip re-creating the archive.
 * @param targetpath - path-info object ({ abs, rel, ... })
 * @returns true if the path exists, false otherwise
 */
const exists = async (targetpath) => {
    const found = await utils_1.UtilFT.pathExists(targetpath.abs);
    if (!found)
        return false;
    utils_1.SLogger.warn(`${targetpath.rel} 已经存在`);
    return true;
};
|
|
110
|
+
/** Append an extension to a path-info object, producing a new path-info whose
 * abs/rel/base all point at `<base>.<ext>` in the same directory.
 * @param pp - input path-info
 * @param ext - extension to append (without the leading dot)
 * @returns new path-info for the extended name
 */
const addExt = (pp, ext) => {
    const extendedBase = `${pp.base}.${ext}`;
    return {
        list: pp.list,
        abs: pathe_1.default.join(pathe_1.default.dirname(pp.abs), extendedBase),
        rel: pathe_1.default.join(pathe_1.default.dirname(pp.rel), extendedBase),
        base: extendedBase,
        absdir: pp.absdir,
    };
};
|
|
122
|
+
/** Derive the tar archive path-info for the given input path.
 * @param pp - input path-info
 * @returns path-info pointing at `<base>.tar`
 */
function toTarName(pp) {
    return addExt(pp, 'tar');
}
|
|
127
|
+
/** Pack a file or directory into a tar archive next to it.
 * Skips (with a warning from `exists`) when the tar already exists.
 * @param pp - input path-info
 * @param silence - when true, suppress the timing log
 */
const toTar = async (pp, silence = false) => {
    const packed = toTarName(pp);
    if (await exists(packed))
        return;
    const label = `📦 打包完成: ${packed.rel}`;
    // Quote every entry so names with spaces survive the shell.
    const quotedEntries = pp.list.map(entry => `"${entry}"`).join(' ');
    if (!silence)
        utils_1.SLogger.time(label);
    await utils_1.UtilFunc.exec(`tar -cf "${packed.base}" ${quotedEntries}`, { cwd: pp.absdir, ...execopt });
    if (!silence)
        utils_1.SLogger.timeEnd(label);
};
|
|
142
|
+
/** Test archive integrity via `7z t`; logs success at info level, or the
 * captured stderr at error level when the archive looks damaged.
 * @param pp - path-info of the archive to test
 */
const testCmp = async (pp) => {
    utils_1.SLogger.info(`🔍 开始测试: ${pp.rel}`);
    const result = await utils_1.UtilFunc.exec(`7z t "${pp.base}"`, { cwd: pp.absdir, ...execopt });
    const errText = result.stderr;
    // Any non-whitespace stderr output is treated as a failed test.
    if (errText.trim().length === 0) {
        utils_1.SLogger.info(`🟢 ${pp.rel} 测试通过`);
        return;
    }
    utils_1.SLogger.error(`❌ ${pp.rel} 可能损坏`);
    utils_1.SLogger.error(errText);
};
|
|
156
|
+
/** Build the "compression finished" log/timer label for a target path.
 * @param pp - path-info of the archive being produced
 * @returns the label text
 */
function cmpFlag(pp) {
    return `🗃️ 压缩完成: ${pp.rel}`;
}
|
|
161
|
+
/** Compression queue — concurrency 1, so heavyweight compress commands run
 * strictly one at a time even when the CLI processes inputs in parallel. */
const cmpQueue = new utils_1.PromiseQueue({ concurrency: 1 });
|
|
163
|
+
/** Run a compression command through the serial queue, wrapped in a
 * time/timeEnd pair so the duration is logged under `flag`.
 * @param cmd - shell command to execute
 * @param flag - log/timer label
 * @param opt - options forwarded to UtilFunc.exec (cwd etc.)
 */
const queueExec = async (cmd, flag, opt) => {
    const task = async () => {
        utils_1.SLogger.time(flag);
        await utils_1.UtilFunc.exec(cmd, opt);
        utils_1.SLogger.timeEnd(flag);
    };
    return cmpQueue.enqueue(task);
};
|
|
172
|
+
/** Shared "archive directly with 7z" step used by the zip/7z formats:
 * skip if the target exists, compress through the serial queue, report the
 * ratio for single-file sources, optionally run the integrity test.
 * @param ext - output extension (also the table key)
 * @param typeFlag - 7z `-t` archive type
 */
const archiveWith7z = async ({ requirePack, input, noTest }, ext, typeFlag) => {
    const outPp = addExt(input, ext);
    if (await exists(outPp))
        return;
    await queueExec(`7z a -bd -t${typeFlag} "${outPp.base}" ${input.list.map(v => `"${v}"`).join(' ')}`, cmpFlag(outPp), { cwd: input.absdir, ...execopt });
    // Ratio only makes sense for a single source file; packed (dir/multi) inputs
    // have no single meaningful source size.
    if (!requirePack)
        await logCompressionRatio(input.abs, outPp.abs);
    if (noTest != true)
        await testCmp(outPp);
};
/** Shared "tar then compress" step used by the zstd/xz formats.
 * @param ext - output extension appended to the (possibly tar'ed) base
 * @param buildCmd - (sourceBase, targetBase) => shell command string
 */
const compressPacked = async ({ requirePack, input, keep, noTest }, ext, buildCmd) => {
    // Directories / multi-entry inputs are tar'ed first; single files compress as-is.
    const pakPp = requirePack ? toTarName(input) : input;
    const outPp = addExt(pakPp, ext);
    if (requirePack && await exists(pakPp))
        return;
    if (await exists(outPp))
        return;
    if (requirePack)
        await toTar(input);
    await queueExec(buildCmd(pakPp.base, outPp.base), cmpFlag(outPp), { cwd: pakPp.absdir, ...execopt });
    // Report ratio relative to the tar (or the original single file).
    await logCompressionRatio(pakPp.abs, outPp.abs);
    // Clean up the intermediate tar unless -k/--keep was given.
    if (requirePack && !keep)
        await fs_1.default.promises.rm(pakPp.abs, { force: true });
    if (noTest != true)
        await testCmp(outPp);
};
/** Compression function table: maps a format name to its handler. Each handler
 * receives { requirePack, input, keep, noTest, ... } (see compressOne). */
const compressFuncTable = {
    /** tar: pack only, no compression. */
    tar: async ({ input, noTest }) => {
        await toTar(input);
        const tarName = toTarName(input);
        if (noTest != true)
            await testCmp(tarName);
    },
    /** zip. */
    zip: async (opt) => archiveWith7z(opt, 'zip', 'zip'),
    /** 7z: for ~100MB-scale low-entropy directories.
     * Excellent there — archive-free, faster than tar+zstd, slightly better ratio
     * than tar+xz. For structured vertex-like data (e.g. unity assets) 7-zip's
     * implementation auto-applies a delta filter and beats xz by ~10% at any size.
     */
    '7z': async (opt) => archiveWith7z(opt, '7z', '7z'),
    /** zstd: for high-entropy files (png|ckpt|pth|mp4).
     * Very fast ~95% ratio on any high-entropy file, close to 3% better than
     * 7z/xz; decompression is extremely fast.
     */
    zstd: async (opt) => compressPacked(opt, 'zst', (src, dst) => `zstd -q -T0 "${src}" -o "${dst}"`),
    /** xz: for single files or very large directories.
     * Underperforms 7z when the result is only ~100MB-scale.
     */
    xz: async (opt) => compressPacked(opt, 'xz', (src, dst) => `7z a -bd -txz "${dst}" "${src}"`),
};
|
|
255
|
+
/** Archive a single file/directory target.
 * Decides whether tar packing is required, then dispatches to the handler for
 * the chosen format, logging start and total duration.
 * @param opt - compression options, including the `input` path-info
 * @param format - key into compressFuncTable
 */
async function compressOne(opt, format) {
    const { input } = opt;
    // Multi-entry inputs always need packing; the lstat is deliberately skipped
    // for them because their synthetic `abs` path may not exist on disk.
    const requirePack = input.list.length > 1
        ? true
        : (await fs_1.default.promises.lstat(input.abs)).isDirectory();
    utils_1.SLogger.info(`📁 开始归档: ${input.rel}`);
    const doneLabel = `☑️ 归档完成: ${input.rel}`;
    utils_1.SLogger.time(doneLabel);
    await compressFuncTable[format]({ requirePack, ...opt });
    utils_1.SLogger.timeEnd(doneLabel);
}
|
|
@@ -4,7 +4,7 @@ import type { Command } from "commander";
|
|
|
4
4
|
export declare function checkNpmToken(opt?: {
|
|
5
5
|
logstd?: boolean;
|
|
6
6
|
threshold?: number;
|
|
7
|
-
|
|
7
|
+
logLvl?: LogLevel | ((v: string) => void);
|
|
8
8
|
}): Promise<string | undefined>;
|
|
9
9
|
/**更新版本号并发布npm包 */
|
|
10
10
|
export declare const CmdCheckToken: (program: Command) => Command;
|
|
@@ -5,7 +5,7 @@ exports.checkNpmToken = checkNpmToken;
|
|
|
5
5
|
const utils_1 = require("@zwa73/utils");
|
|
6
6
|
/**检查npm token过期 */
|
|
7
7
|
async function checkNpmToken(opt) {
|
|
8
|
-
const { logstd = false, threshold = 15,
|
|
8
|
+
const { logstd = false, threshold = 15, logLvl = 'warn', } = opt ?? {};
|
|
9
9
|
try {
|
|
10
10
|
const { stderr, stdout } = await utils_1.UtilFunc.exec('npm token list', {
|
|
11
11
|
outlvl: logstd ? console.log : undefined
|
|
@@ -28,10 +28,10 @@ async function checkNpmToken(opt) {
|
|
|
28
28
|
}
|
|
29
29
|
const msgText = msg.join('\n');
|
|
30
30
|
if (msgText.length > 0) {
|
|
31
|
-
if (typeof
|
|
32
|
-
utils_1.SLogger.log(
|
|
33
|
-
else if (typeof
|
|
34
|
-
|
|
31
|
+
if (typeof logLvl == 'string' && logLvl !== 'none')
|
|
32
|
+
utils_1.SLogger.log(logLvl, msgText);
|
|
33
|
+
else if (typeof logLvl == 'function')
|
|
34
|
+
logLvl(msgText);
|
|
35
35
|
return msgText;
|
|
36
36
|
}
|
|
37
37
|
return undefined;
|
package/dist/Command/MapPath.js
CHANGED
|
@@ -17,7 +17,7 @@ const CmdMapPath = (program) => program
|
|
|
17
17
|
.argument("<replacement>", "替换字符串")
|
|
18
18
|
.option("-e, --exclude <regex>", "排除文件的正则表达式")
|
|
19
19
|
.option(`-d, --duplicate-handling <${DupMethodList.join('|')}>`, `处理重名文件的方式:
|
|
20
|
-
skip 不进行处理
|
|
20
|
+
skip 不进行处理 默认
|
|
21
21
|
overwrite 覆盖重名
|
|
22
22
|
rename 以 [name]_idx[.ext?] 的形式重命名`, "skip")
|
|
23
23
|
.option("-r, --recursive", "递归的处理子目录", false)
|
package/dist/Command/Release.js
CHANGED
|
@@ -30,7 +30,7 @@ const CmdRelease = (program) => program
|
|
|
30
30
|
await (0, Bump_1.bump)(opt);
|
|
31
31
|
utils_1.SLogger.info(`开始发布项目`);
|
|
32
32
|
try {
|
|
33
|
-
const chkPromise = (0, CheckToken_1.checkNpmToken)({
|
|
33
|
+
const chkPromise = (0, CheckToken_1.checkNpmToken)({ logLvl: 'none' });
|
|
34
34
|
if (opt.local) {
|
|
35
35
|
utils_1.SLogger.info("正在打包...");
|
|
36
36
|
const fullpath = pathe_1.default.join(process.cwd(), opt.local);
|
package/dist/Command/Route.js
CHANGED
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.cliRoute = cliRoute;
|
|
4
4
|
const commander_1 = require("commander");
|
|
5
|
+
const Archive_1 = require("./Archive");
|
|
5
6
|
const Bump_1 = require("./Bump");
|
|
6
7
|
const CheckToken_1 = require("./CheckToken");
|
|
7
8
|
const ExpandMacro_1 = require("./ExpandMacro");
|
|
@@ -23,5 +24,6 @@ async function cliRoute() {
|
|
|
23
24
|
(0, GenTemplate_1.CmdGenTemplate)(commander_1.program);
|
|
24
25
|
(0, CheckToken_1.CmdCheckToken)(commander_1.program);
|
|
25
26
|
(0, Bump_1.CmdBump)(commander_1.program);
|
|
27
|
+
(0, Archive_1.CmdArchive)(commander_1.program);
|
|
26
28
|
commander_1.program.parse(process.argv);
|
|
27
29
|
}
|
package/dist/Command/ScanDups.js
CHANGED
|
@@ -38,7 +38,7 @@ async function struct2hash(struct, fn) {
|
|
|
38
38
|
return (await Promise.all(recursion(v))).flat();
|
|
39
39
|
});
|
|
40
40
|
};
|
|
41
|
-
return utils_1.UtilFunc.
|
|
41
|
+
return utils_1.UtilFunc.computeHash((await Promise.all(recursion(struct)))
|
|
42
42
|
.flat().join('|'), { algorithm: "blake2b512" });
|
|
43
43
|
}
|
|
44
44
|
/**重命名文件或路径 scan_duplicates */
|
|
@@ -97,8 +97,8 @@ const CmdScanDups = (program) => program
|
|
|
97
97
|
.map(async (filePath) => ({
|
|
98
98
|
filePath,
|
|
99
99
|
hash: struct
|
|
100
|
-
? await struct2hash(await scanDirStruct(filePath), async (str) => utils_1.UtilFT.
|
|
101
|
-
: await utils_1.UtilFT.
|
|
100
|
+
? await struct2hash(await scanDirStruct(filePath), async (str) => utils_1.UtilFT.computeHash(str, { sampled: true })).then(tap(() => sampledProgress.increment()))
|
|
101
|
+
: await utils_1.UtilFT.computeHash(filePath, { sampled: true }).then(tap(() => sampledProgress.increment())),
|
|
102
102
|
}))
|
|
103
103
|
.toArray(),
|
|
104
104
|
// 第三步:筛选重复的采样哈希 (去掉唯一的采样哈希)
|
|
@@ -109,8 +109,8 @@ const CmdScanDups = (program) => program
|
|
|
109
109
|
.map(async (filePath) => ({
|
|
110
110
|
filePath,
|
|
111
111
|
hash: struct
|
|
112
|
-
? await struct2hash(await scanDirStruct(filePath), async (str) => utils_1.UtilFT.
|
|
113
|
-
: await utils_1.UtilFT.
|
|
112
|
+
? await struct2hash(await scanDirStruct(filePath), async (str) => utils_1.UtilFT.computeHash(str)).then(tap(() => sampledProgress.increment()))
|
|
113
|
+
: await utils_1.UtilFT.computeHash(filePath).then(tap(() => fullHashProgress.increment())), // 计算完整哈希
|
|
114
114
|
}))
|
|
115
115
|
.toArray(),
|
|
116
116
|
// 第五步:重新整理完整哈希结果, 过滤唯一哈希
|