@zwa73/dev-utils 1.0.86 → 1.0.87

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
 {
     "extends": "./tsconfig.json",
     "include": ["./src/**/*.ts", "./src/**/*.js"],
-    "exclude": ["./node_modules/**/*","./src/**/*.macro.ts"]
+    "exclude": ["./node_modules/**/*","./src/**/*.macro.ts","./src/**/*.schema.ts"]
 }
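The package depends on typescript-json-schema (see package.json below), so the newly excluded `./src/**/*.schema.ts` files are presumably generated schema modules, now kept out of this build config the same way the `*.macro.ts` macro sources already were.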
@@ -56,7 +56,7 @@ overwrite 覆盖重名
     if (await utils_1.UtilFT.pathExists(newFilePath)) {
         if (DupMethodWithoutMove.includes(options.duplicateHandling)) {
             const fixhd = duplicateHandling;
-            await (0, utils_1.matchProc)(fixhd, {
+            await (0, utils_1.match)(fixhd, {
                 'skip': () => utils_1.SLogger.info(`重名文件存在,跳过:${newFilePath}`),
                 'overwrite': () => mapPath(filePath, newFilePath),
             });
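`matchProc` is renamed to `match` throughout this release (the same change appears in the ESM build below). Judging from the call sites, `match` dispatches a value to a handler map, with an optional fallback handler (used in the Scan-Dups output step further down). A minimal sketch of such a dispatcher, with the signature inferred from these call sites rather than taken from the actual @zwa73/utils source:

```ts
// Minimal sketch of a match-style dispatcher; the signature is inferred
// from the call sites in this diff and may differ from @zwa73/utils.
type Handler<R> = () => R | Promise<R>;

async function matchSketch<K extends string, R>(
    value: K,
    handlers: Partial<Record<K, Handler<R>>>,
    fallback?: Handler<R>,
): Promise<R | undefined> {
    const handler = handlers[value] ?? fallback; // pick the named branch, else the default
    return handler ? await handler() : undefined;
}

// Usage mirroring the diff: dispatch on a duplicate-handling mode.
const mode = "skip" as "skip" | "overwrite";
void matchSketch(mode, {
    skip: () => console.log("duplicate exists, skipping"),
    overwrite: () => console.log("overwriting"),
});
```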
@@ -8,6 +8,8 @@ const utils_1 = require("@zwa73/utils");
 const fs_1 = __importDefault(require("fs"));
 const crypto_1 = __importDefault(require("crypto"));
 const pathe_1 = __importDefault(require("pathe"));
+const cli_progress_1 = __importDefault(require("cli-progress"));
+const { tap } = utils_1.UtilFP;
 async function calculateHash(filePath) {
     return new Promise((resolve, reject) => {
         const hash = crypto_1.default.createHash('md5');
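The new `tap` helper from `UtilFP` is used below both inside `pipe(...)` steps and inside `.then(...)` chains, which suggests the usual pass-through combinator: run a side effect, return the input unchanged. A sketch under that assumption:

```ts
// Assumed shape of UtilFP.tap, inferred from its uses in this diff:
// run a side effect and hand the value through untouched.
const tap = <T>(effect: (value: T) => unknown) => (value: T): T => {
    effect(value);
    return value;
};

// e.g. hash: await calculateSampledHash(f).then(tap(() => bar.increment()))
```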
@@ -17,34 +19,83 @@ async function calculateHash(filePath) {
         stream.on('error', reject);
     });
 }
+async function calculateSampledHash(filePath, chunkSize = 1024, // size of each chunk (e.g. 1KB)
+chunkCount = 10) {
+    const stats = await fs_1.default.promises.stat(filePath);
+    const totalSize = stats.size;
+    if (totalSize < chunkSize * chunkCount)
+        return calculateHash(filePath);
+    const positions = Array.from({ length: chunkCount }, (_, i) => Math.floor((totalSize / chunkCount) * i));
+    const hash = crypto_1.default.createHash("md5"); // or another hash algorithm, such as CRC32/BLAKE3
+    for (const position of positions) {
+        const buffer = await new Promise((resolve, reject) => {
+            // create a file stream limited to the sampled byte range
+            const stream = fs_1.default.createReadStream(filePath, {
+                start: position,
+                end: Math.min(position + chunkSize - 1, totalSize - 1), // ensure we do not read past the end of the file
+                highWaterMark: chunkSize, // efficient streaming chunk size
+            });
+            const chunks = [];
+            stream.on("data", (chunk) => chunks.push(chunk));
+            stream.on("end", () => resolve(Buffer.concat(chunks))); // merge the chunk data
+            stream.on("error", reject);
+        });
+        hash.update(buffer); // update the hash
+    }
+    return hash.digest("hex"); // return the final digest
+}
 /** Rename files or paths scan_duplicates */
 const CmdScanDups = (program) => program
     .command("Scan-Dups")
     .alias("scandups")
     .description("扫描当前目录下hash重复的文件")
     .option("-re, --regex <regex>", "文件的正则表达式, 使用posix路径", ".*")
-    .option("-o, --out <dir|console>", "输出的json文件路径, 默认 scandupsOut.json, 为 \"console\" 时无文件输出", "scandupsOut")
+    .option("-o, --out <dir|console>", "输出的json文件路径, 默认 scandups.json, 为 \"console\" 时无文件输出", "scandups")
     .option("-r, --recursive", "是否处理子目录, 默认 true", true)
     .action(async (options) => {
     const regex = new RegExp(options.regex);
     const basePath = process.cwd();
-    const pList = (await utils_1.UtilFT.fileSearchRegex(basePath, regex.source, {
-        relative: options.recursive,
-    })).map(async (filePath) => ({ filePath, hash: await calculateHash(filePath) }));
-    const hashMap = (await Promise.all(pList)).reduce((obj, cur) => {
-        obj[cur.hash] = (obj[cur.hash] ?? []).concat(cur.filePath);
-        return obj;
+    // add a multi-step progress bar
+    const progressBar = new cli_progress_1.default.MultiBar({
+        clearOnComplete: true, hideCursor: true,
+        format: " {task} [{bar}] {percentage}% | ETA: {eta}s | {value}/{total} | {status}",
+    }, cli_progress_1.default.Presets.shades_classic);
+    // progress bar for sampled hashing
+    const sampledProgress = progressBar.create(1, 0, { task: "快速扫描", status: "准备中..." });
+    // progress bar for full hashing
+    const fullHashProgress = progressBar.create(1, 0, { task: "完整扫描", status: "准备中..." });
+    const reduce2Dupmap = (list) => list.reduce((acc, cur) => {
+        const files = acc[cur.hash] ?? [];
+        acc[cur.hash] = [...files, cur.filePath]; // group file paths under their hash
+        return acc;
     }, {});
-    const out = {};
-    for (const hash in hashMap) {
-        const duplicateFiles = hashMap[hash];
-        if (duplicateFiles.length <= 1)
-            continue;
-        out[hash] = duplicateFiles;
-    }
-    if (options.out === "console")
-        utils_1.SLogger.info(out);
-    else
-        await utils_1.UtilFT.writeJSONFile(pathe_1.default.join(basePath, options.out), out);
+    await (0, utils_1.pipe)(
+    // step 1: file search, collect paths matching the regex
+    utils_1.UtilFT.fileSearchRegex(basePath, regex.source, { recursive: options.recursive }), tap(list => void sampledProgress.setTotal(list.length) ??
+        void sampledProgress.update(0, { status: `总计 ${list.length} 个文件` })),
+    // step 2: quick scan, compute sampled hashes
+    list => utils_1.Stream.from(list, 8)
+        .map(async (filePath) => ({
+        filePath,
+        hash: await calculateSampledHash(filePath).then(tap(() => sampledProgress.increment())),
+    }))
+        .toArray(),
+    // step 3: keep only duplicated sampled hashes (drop unique ones)
+    reduce2Dupmap, map => Object.entries(map).reduce((acc, [hash, files]) => files.length > 1 ? [...acc, ...files] : acc, // collect file paths whose sampled hash is duplicated
+    []), tap(dupPaths => void fullHashProgress.setTotal(dupPaths.length) ??
+        void fullHashProgress.update(0, { status: `快速扫描检出 ${dupPaths.length} 个可能的相等项` })),
+    // step 4: concurrently compute full hashes for the candidate paths
+    dups => utils_1.Stream.from(dups, 8)
+        .map(async (filePath) => ({
+        filePath,
+        hash: await calculateHash(filePath).then(tap(() => fullHashProgress.increment())), // compute the full hash
+    }))
+        .toArray(),
+    // step 5: regroup the full-hash results, filtering unique hashes
+    reduce2Dupmap, map => Object.entries(map).reduce((acc, [hash, files]) => files.length <= 1 ? acc : { ...acc, [hash]: files }, {}),
+    // step 6: output the result
+    tap(() => progressBar.stop()), out => (0, utils_1.match)(options.out, {
+        "console": () => utils_1.SLogger.info(out),
+    }, () => utils_1.UtilFT.writeJSONFile(pathe_1.default.join(basePath, options.out), out)));
 });
 exports.CmdScanDups = CmdScanDups;
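The sampled hash reads at most `chunkCount * chunkSize` bytes (10KB by default) at evenly spaced offsets, so two different files can agree on every sampled range; that is why step 4 recomputes the full hash for every candidate before reporting duplicates. A self-contained sketch of that confirmation step (the helper names here are illustrative, not from the package):

```ts
import crypto from "crypto";
import fs from "fs";

const md5 = (buf: Buffer) => crypto.createHash("md5").update(buf).digest("hex");

// Phase 2 of the scan: candidates share a sampled hash, which may be a
// false positive, so settle them with a full-content hash.
async function confirmDuplicates(candidates: string[]): Promise<Map<string, string[]>> {
    const byHash = new Map<string, string[]>();
    for (const file of candidates) {
        const hash = md5(await fs.promises.readFile(file)); // full-content hash
        byHash.set(hash, [...(byHash.get(hash) ?? []), file]);
    }
    // keep only hashes shared by at least two files
    return new Map([...byHash].filter(([, files]) => files.length > 1));
}
```

Note that sampling can only produce false positives, never false negatives: identical files always agree on every sampled range, so the quick scan cannot lose a real duplicate.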
@@ -161,7 +161,8 @@ var UtilDT;
     UtilDT.batchNode = batchNode;
     //#region macro tools
     const parseMacroPaths = (opt) => {
-        const loc = utils_1.UtilFunc.getFuncLoc(3);
+        // JsFunc -> ComposeFunc -> xxxmacro -> parseMacroPaths -> sourceTS
+        const loc = utils_1.UtilFunc.getFuncLoc(4);
         if (!loc && !opt?.filePath)
             (0, utils_1.throwError)(`parseMacroPaths 未能找到函数位置`);
         const basePath = loc?.filePath;
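The bumped stack depth matches the new comment: one more wrapper now sits between the macro entry point and `parseMacroPaths`, so the interesting frame is one level further out. A hypothetical sketch of what a `getFuncLoc(depth)`-style helper typically does (the real @zwa73/utils implementation is not shown in this diff):

```ts
// Hypothetical stack-walking helper: parse Error.stack and return the
// file/line at the requested depth. Frame layout is V8-style:
// stack[0] = "Error", stack[1] = this function, stack[2] = its caller, ...
function getFuncLocSketch(depth: number): { filePath: string; line: number } | undefined {
    const stack = new Error().stack?.split("\n") ?? [];
    const frame = stack[depth + 1];
    const m = frame?.match(/\((.+):(\d+):\d+\)$/) ?? frame?.match(/at (.+):(\d+):\d+$/);
    return m ? { filePath: m[1], line: Number(m[2]) } : undefined;
}
```

Under this layout, inserting one wrapper between caller and callee shifts the target frame by exactly one, which is the 3 to 4 change above.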
@@ -232,7 +233,7 @@ var UtilDT;
             else if (!opt?.glob)
                 utils_1.SLogger.error(`UtilDT.regionMacro 无法找到区域 ${regionId}`);
         };
-        plist.push(utils_1.UtilFunc.queueProc(pathe_1.default.normalize(filePath), queuefunc));
+        plist.push(utils_1.PromiseQueue.enqueue(pathe_1.default.normalize(filePath), queuefunc));
     }
     await Promise.all(plist);
 }
@@ -290,7 +291,7 @@ var UtilDT;
             else if (!opt?.glob)
                 utils_1.SLogger.error(`UtilDT.commentMacro 无法找到注释 ${commentId}`);
         };
-        plist.push(utils_1.UtilFunc.queueProc(pathe_1.default.normalize(filePath), queuefunc));
+        plist.push(utils_1.PromiseQueue.enqueue(pathe_1.default.normalize(filePath), queuefunc));
     }
     await Promise.all(plist);
 }
@@ -317,7 +318,7 @@ var UtilDT;
             });
             await fs.promises.writeFile(filePath, parseCode, 'utf-8');
         };
-        plist.push(utils_1.UtilFunc.queueProc(pathe_1.default.normalize(filePath), queuefunc));
+        plist.push(utils_1.PromiseQueue.enqueue(pathe_1.default.normalize(filePath), queuefunc));
     }
     await Promise.all(plist);
 }
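All three macro routines make the same swap: `UtilFunc.queueProc(key, task)` becomes `PromiseQueue.enqueue(key, task)`. Both names point at a keyed serial queue: tasks sharing a key (here, the normalized file path) run one after another, so concurrent macro expansions never write the same file simultaneously, while different files still proceed in parallel via `Promise.all`. A minimal sketch of such a queue, assuming that behavior (the actual @zwa73/utils implementation may differ):

```ts
// Keyed promise queue: tasks with the same key run strictly in order.
const tails = new Map<string, Promise<unknown>>();

function enqueue<T>(key: string, task: () => Promise<T>): Promise<T> {
    const tail = tails.get(key) ?? Promise.resolve();
    // chain after the current tail; swallow the previous error so one
    // failed task does not poison every later task on the same key
    const next = tail.catch(() => undefined).then(task);
    tails.set(key, next);
    return next;
}
```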
@@ -1,4 +1,4 @@
-import { SLogger, UtilFT, matchProc, throwError } from "@zwa73/utils";
+import { SLogger, UtilFT, match, throwError } from "@zwa73/utils";
 import fs from "fs";
 import path from "pathe";
 const DupMethodList = ["skip", "overwrite", "move"];
@@ -50,7 +50,7 @@ overwrite 覆盖重名
     if (await UtilFT.pathExists(newFilePath)) {
         if (DupMethodWithoutMove.includes(options.duplicateHandling)) {
             const fixhd = duplicateHandling;
-            await matchProc(fixhd, {
+            await match(fixhd, {
                 'skip': () => SLogger.info(`重名文件存在,跳过:${newFilePath}`),
                 'overwrite': () => mapPath(filePath, newFilePath),
             });
@@ -1,7 +1,9 @@
-import { SLogger, UtilFT } from "@zwa73/utils";
+import { match, pipe, SLogger, Stream, UtilFP, UtilFT } from "@zwa73/utils";
 import fs from "fs";
 import crypto from 'crypto';
 import path from "pathe";
+import cliProgress from "cli-progress";
+const { tap } = UtilFP;
 async function calculateHash(filePath) {
     return new Promise((resolve, reject) => {
         const hash = crypto.createHash('md5');
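`pipe` threads a starting value through a list of unary steps; in the Scan-Dups rewrite below, the first argument is a promise and several steps are async, so each intermediate result is evidently awaited before the next step runs. A sketch under that assumption (not the actual @zwa73/utils signature):

```ts
// Async-aware pipe sketch: await the seed, then await each step in turn.
async function pipeSketch(seed: unknown, ...steps: Array<(value: any) => unknown>) {
    let acc: unknown = await seed;
    for (const step of steps) acc = await step(acc);
    return acc;
}

// e.g. await pipeSketch(findFiles(), hashAll, groupByHash, report);
// (findFiles/hashAll/groupByHash/report are hypothetical step names)
```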
@@ -11,33 +13,82 @@ async function calculateHash(filePath) {
         stream.on('error', reject);
     });
 }
+async function calculateSampledHash(filePath, chunkSize = 1024, // size of each chunk (e.g. 1KB)
+chunkCount = 10) {
+    const stats = await fs.promises.stat(filePath);
+    const totalSize = stats.size;
+    if (totalSize < chunkSize * chunkCount)
+        return calculateHash(filePath);
+    const positions = Array.from({ length: chunkCount }, (_, i) => Math.floor((totalSize / chunkCount) * i));
+    const hash = crypto.createHash("md5"); // or another hash algorithm, such as CRC32/BLAKE3
+    for (const position of positions) {
+        const buffer = await new Promise((resolve, reject) => {
+            // create a file stream limited to the sampled byte range
+            const stream = fs.createReadStream(filePath, {
+                start: position,
+                end: Math.min(position + chunkSize - 1, totalSize - 1), // ensure we do not read past the end of the file
+                highWaterMark: chunkSize, // efficient streaming chunk size
+            });
+            const chunks = [];
+            stream.on("data", (chunk) => chunks.push(chunk));
+            stream.on("end", () => resolve(Buffer.concat(chunks))); // merge the chunk data
+            stream.on("error", reject);
+        });
+        hash.update(buffer); // update the hash
+    }
+    return hash.digest("hex"); // return the final digest
+}
 /** Rename files or paths scan_duplicates */
 export const CmdScanDups = (program) => program
     .command("Scan-Dups")
     .alias("scandups")
     .description("扫描当前目录下hash重复的文件")
     .option("-re, --regex <regex>", "文件的正则表达式, 使用posix路径", ".*")
-    .option("-o, --out <dir|console>", "输出的json文件路径, 默认 scandupsOut.json, 为 \"console\" 时无文件输出", "scandupsOut")
+    .option("-o, --out <dir|console>", "输出的json文件路径, 默认 scandups.json, 为 \"console\" 时无文件输出", "scandups")
     .option("-r, --recursive", "是否处理子目录, 默认 true", true)
     .action(async (options) => {
     const regex = new RegExp(options.regex);
     const basePath = process.cwd();
-    const pList = (await UtilFT.fileSearchRegex(basePath, regex.source, {
-        relative: options.recursive,
-    })).map(async (filePath) => ({ filePath, hash: await calculateHash(filePath) }));
-    const hashMap = (await Promise.all(pList)).reduce((obj, cur) => {
-        obj[cur.hash] = (obj[cur.hash] ?? []).concat(cur.filePath);
-        return obj;
+    // add a multi-step progress bar
+    const progressBar = new cliProgress.MultiBar({
+        clearOnComplete: true, hideCursor: true,
+        format: " {task} [{bar}] {percentage}% | ETA: {eta}s | {value}/{total} | {status}",
+    }, cliProgress.Presets.shades_classic);
+    // progress bar for sampled hashing
+    const sampledProgress = progressBar.create(1, 0, { task: "快速扫描", status: "准备中..." });
+    // progress bar for full hashing
+    const fullHashProgress = progressBar.create(1, 0, { task: "完整扫描", status: "准备中..." });
+    const reduce2Dupmap = (list) => list.reduce((acc, cur) => {
+        const files = acc[cur.hash] ?? [];
+        acc[cur.hash] = [...files, cur.filePath]; // group file paths under their hash
+        return acc;
     }, {});
-    const out = {};
-    for (const hash in hashMap) {
-        const duplicateFiles = hashMap[hash];
-        if (duplicateFiles.length <= 1)
-            continue;
-        out[hash] = duplicateFiles;
-    }
-    if (options.out === "console")
-        SLogger.info(out);
-    else
-        await UtilFT.writeJSONFile(path.join(basePath, options.out), out);
+    await pipe(
+    // step 1: file search, collect paths matching the regex
+    UtilFT.fileSearchRegex(basePath, regex.source, { recursive: options.recursive }), tap(list => void sampledProgress.setTotal(list.length) ??
+        void sampledProgress.update(0, { status: `总计 ${list.length} 个文件` })),
+    // step 2: quick scan, compute sampled hashes
+    list => Stream.from(list, 8)
+        .map(async (filePath) => ({
+        filePath,
+        hash: await calculateSampledHash(filePath).then(tap(() => sampledProgress.increment())),
+    }))
+        .toArray(),
+    // step 3: keep only duplicated sampled hashes (drop unique ones)
+    reduce2Dupmap, map => Object.entries(map).reduce((acc, [hash, files]) => files.length > 1 ? [...acc, ...files] : acc, // collect file paths whose sampled hash is duplicated
+    []), tap(dupPaths => void fullHashProgress.setTotal(dupPaths.length) ??
+        void fullHashProgress.update(0, { status: `快速扫描检出 ${dupPaths.length} 个可能的相等项` })),
+    // step 4: concurrently compute full hashes for the candidate paths
+    dups => Stream.from(dups, 8)
+        .map(async (filePath) => ({
+        filePath,
+        hash: await calculateHash(filePath).then(tap(() => fullHashProgress.increment())), // compute the full hash
+    }))
+        .toArray(),
+    // step 5: regroup the full-hash results, filtering unique hashes
+    reduce2Dupmap, map => Object.entries(map).reduce((acc, [hash, files]) => files.length <= 1 ? acc : { ...acc, [hash]: files }, {}),
+    // step 6: output the result
+    tap(() => progressBar.stop()), out => match(options.out, {
+        "console": () => SLogger.info(out),
+    }, () => UtilFT.writeJSONFile(path.join(basePath, options.out), out)));
 });
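`Stream.from(list, 8).map(...).toArray()` appears to be a concurrency-limited async map: at most 8 hash computations in flight at once, which keeps file-handle and memory pressure bounded during the scan. A minimal sketch of that pattern (the real Stream API is not part of this diff and may differ):

```ts
// Concurrency-limited async map: run fn over items with at most `limit`
// tasks in flight, preserving input order in the results.
async function mapLimit<T, R>(
    items: readonly T[],
    limit: number,
    fn: (item: T) => Promise<R>,
): Promise<R[]> {
    const results: R[] = new Array(items.length);
    let next = 0;
    const workers = Array.from({ length: Math.min(limit, items.length) }, async () => {
        while (next < items.length) {
            const i = next++; // claim an index; safe because JS is single-threaded
            results[i] = await fn(items[i]);
        }
    });
    await Promise.all(workers);
    return results;
}
```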
@@ -1,7 +1,7 @@
 import path from 'pathe';
 import * as TJS from 'typescript-json-schema';
 import * as fs from 'fs';
-import { SLogger, UtilFT, UtilFunc, dedent, throwError } from '@zwa73/utils';
+import { PromiseQueue, SLogger, UtilFT, UtilFunc, dedent, throwError } from '@zwa73/utils';
 import { Project, SyntaxKind } from 'ts-morph';
 export var UtilDT;
 (function (UtilDT) {
@@ -132,7 +132,8 @@ export var UtilDT;
     UtilDT.batchNode = batchNode;
     //#region macro tools
     const parseMacroPaths = (opt) => {
-        const loc = UtilFunc.getFuncLoc(3);
+        // JsFunc -> ComposeFunc -> xxxmacro -> parseMacroPaths -> sourceTS
+        const loc = UtilFunc.getFuncLoc(4);
         if (!loc && !opt?.filePath)
             throwError(`parseMacroPaths 未能找到函数位置`);
         const basePath = loc?.filePath;
@@ -203,7 +204,7 @@ export var UtilDT;
             else if (!opt?.glob)
                 SLogger.error(`UtilDT.regionMacro 无法找到区域 ${regionId}`);
         };
-        plist.push(UtilFunc.queueProc(path.normalize(filePath), queuefunc));
+        plist.push(PromiseQueue.enqueue(path.normalize(filePath), queuefunc));
     }
     await Promise.all(plist);
 }
@@ -261,7 +262,7 @@ export var UtilDT;
             else if (!opt?.glob)
                 SLogger.error(`UtilDT.commentMacro 无法找到注释 ${commentId}`);
         };
-        plist.push(UtilFunc.queueProc(path.normalize(filePath), queuefunc));
+        plist.push(PromiseQueue.enqueue(path.normalize(filePath), queuefunc));
     }
     await Promise.all(plist);
 }
@@ -288,7 +289,7 @@ export var UtilDT;
             });
             await fs.promises.writeFile(filePath, parseCode, 'utf-8');
         };
-        plist.push(UtilFunc.queueProc(path.normalize(filePath), queuefunc));
+        plist.push(PromiseQueue.enqueue(path.normalize(filePath), queuefunc));
     }
     await Promise.all(plist);
 }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@zwa73/dev-utils",
-  "version": "1.0.86",
+  "version": "1.0.87",
   "description": "编译与调试工具",
   "exports": {
     ".": {
@@ -24,6 +24,7 @@
   "dependencies": {
     "@deepkit/type-compiler": "^1.0.1-alpha.150",
     "@zwa73/utils": "*",
+    "cli-progress": "^3.12.0",
     "commander": "^11.1.0",
     "pathe": "^1.1.2",
     "ts-morph": "^23.0.0",
@@ -32,6 +33,7 @@
     "typescript-json-schema": "^0.64.0"
   },
   "devDependencies": {
+    "@types/cli-progress": "^3.11.6",
     "@types/jest": "^29.5.12",
     "@types/node": "^20.14.11",
     "jest": "^29.7.0",