js-code-detector 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -4
- package/bin/sameCodeDetect.js +4 -0
- package/dist/cjs/index.d.ts +1 -0
- package/dist/cjs/index.js +14 -2
- package/dist/cjs/util/ast_util/AstFeatUtil.d.ts +63 -0
- package/dist/cjs/util/ast_util/AstFeatUtil.js +330 -0
- package/dist/cjs/util/ast_util/Core.d.ts +70 -0
- package/dist/cjs/util/ast_util/Core.js +193 -0
- package/dist/cjs/util/ast_util/FileUtil.d.ts +3 -3
- package/dist/cjs/util/report_util/createMdByJson.d.ts +2 -0
- package/dist/cjs/util/report_util/createMdByJson.js +74 -0
- package/dist/cjs/util/report_util.d.ts +8 -0
- package/dist/cjs/util/shared/astNodeFeatureExtractorMap.d.ts +2 -0
- package/dist/cjs/util/shared/astNodeFeatureExtractorMap.js +59 -0
- package/dist/cjs/util/shared/featureLevel.d.ts +5 -0
- package/dist/cjs/util/shared/featureLevel.js +33 -0
- package/dist/cjs/util/shared/readDirFiles.d.ts +4 -0
- package/dist/cjs/util/shared/readDirFiles.js +45 -0
- package/package.json +7 -2
- package/dist/cjs/util/ast_parser.d.ts +0 -0
- package/dist/cjs/util/ast_parser.js +0 -0

package/README.md
CHANGED
```diff
@@ -1,4 +1,4 @@
-# code-detector
+# js-code-detector
 
 [![NPM version](https://img.shields.io/npm/v/code-detector.svg?style=flat)](https://npmjs.com/package/code-detector)
 [![NPM downloads](http://img.shields.io/npm/dm/code-detector.svg?style=flat)](https://npmjs.com/package/code-detector)
@@ -21,9 +21,9 @@ $ npm run build
 
 ## Usage
 
-
-Add a scripts command to package.json ```
-
+1. Install **js-code-detector** in the project
+2. Add the ```detect``` scripts command to package.json
+3. Run that command to generate a report in the current project directory
 
 ## Report description
 - filePath: file path
```
package/dist/cjs/index.d.ts
CHANGED

package/dist/cjs/index.js
CHANGED
```diff
@@ -31,6 +31,7 @@ var src_exports = {};
 __export(src_exports, {
   generateReport: () => generateReport,
   getGitRepositoryAndBranch: () => getGitRepositoryAndBranch,
+  sameCodeDetect: () => sameCodeDetect,
   umiPluginCallback: () => umiPluginCallback,
   writeGitDiffTxt: () => writeGitDiffTxt
 });
@@ -43,7 +44,10 @@ var import_fs2 = require("fs");
 var import_report_util = require("./util/report_util");
 var import_utils = require("@umijs/utils");
 var import_dayjs = __toESM(require("dayjs"));
-var
+var import_createMdByJson = require("./util/report_util/createMdByJson");
+var import_readDirFiles = require("./util/shared/readDirFiles");
+var import_Core = __toESM(require("./util/ast_util/Core"));
+var jsonName = "git_diff_report.md";
 async function umiPluginCallback(api) {
   const diff_txt = (0, import_fs.readFileSync)((0, import_path.join)(api.cwd, "git_diff.txt"), "utf-8");
   const gitDiffDetail = (0, import_format_git_diff_content.formatGitDiffContent)(diff_txt);
@@ -57,7 +61,8 @@ async function umiPluginCallback(api) {
   const usingFileNoPrefix = usingFiles.map((item) => item.filePath.replace(absPathPrefix, ""));
   const groupGitDiffLines = gitDiffDetail.filter((item) => usingFileNoPrefix.includes(item.filePath));
   const reports = (0, import_report_util.createDetectReport)({ groupGitDiffLines, tree, absPathPrefix });
-
+  const mdContent = (0, import_createMdByJson.createMdByJson)(reports);
+  (0, import_fs2.writeFileSync)((0, import_path.join)(api.cwd, jsonName), mdContent, { encoding: "utf-8", flag: "w" });
 }
 var shellFileContent = `#!/bin/sh
 time=$(date "+%Y%-m%d")
@@ -106,10 +111,17 @@ async function getGitRepositoryAndBranch() {
 function generateReport(jsonStr) {
   (0, import_fs2.writeFileSync)((0, import_path.join)(process.cwd(), `${(0, import_dayjs.default)().format("YYYYMDD_HHmm")}_${jsonName}`), jsonStr, { encoding: "utf-8", flag: "w" });
 }
+async function sameCodeDetect(dirOfCwd) {
+  const filesAndContent = await (0, import_readDirFiles.readSrcFiles)(dirOfCwd);
+  const { nodeContentGroupList } = import_Core.default.investigate(filesAndContent);
+  const md = import_Core.default.createMarkdownFile(nodeContentGroupList.slice(0, 5));
+  (0, import_fs2.writeFileSync)((0, import_path.join)(process.cwd(), `${(0, import_dayjs.default)().format("YYYYMDD_HHmm")}_same_code.md`), md, { encoding: "utf-8", flag: "w" });
+}
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   generateReport,
   getGitRepositoryAndBranch,
+  sameCodeDetect,
   umiPluginCallback,
   writeGitDiffTxt
 });
```
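
The hunks above wire a new `sameCodeDetect` export into the public API: it reads source files via `readSrcFiles`, groups structurally similar AST subtrees with `Core.investigate`, and writes the top five groups to a timestamped `*_same_code.md` via `Core.createMarkdownFile`. A minimal invocation sketch — assuming the package is installed; the `"src"` default and the output location are taken from the code above and from `readDirFiles.js` further below. Note that the published `Core.js` below references an undefined `AstUtil`, so this call may throw until that reference is fixed.

```js
// Sketch only: calling the export added in 0.0.5 from a Node script.
const { sameCodeDetect } = require("js-code-detector");

(async () => {
  // Scans <cwd>/src (readSrcFiles falls back to "src" when no directory is passed)
  // and writes a `YYYYMDD_HHmm_same_code.md` report into process.cwd().
  await sameCodeDetect("src");
})();
```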

package/dist/cjs/util/ast_util/AstFeatUtil.d.ts
ADDED (+63 lines)
```ts
import { Comment, Node, Identifier } from "@babel/types";
export type MapHashKeyToAstNodeSet = Map<string, Set<AstFeatNode>>;
export interface AstFeatNode {
    type: Node["type"];
    name?: Identifier | string;
    leadingComments?: Comment[] | null;
    innerComments?: Comment[] | null;
    trailingComments?: Comment[] | null;
    start?: number | null;
    end?: number | null;
    loc?: Record<'start' | 'end', {
        line: number;
    }> | null;
    range?: [number, number];
    extra?: Record<string, unknown>;
    _util: {
        startLine: number;
        endLine: number;
        mapHashKeyToString: Map<string, string>;
        mapDepthDiffToHashKey: Map<number, string>;
        filePath: string;
        hashKey: string;
        nodeCollection: AstFeatNode[];
        childrenNode: AstFeatNode[];
        ancestors: AstFeatNode[];
        depth: number;
    };
    [p: string]: any;
}
export default class AstFeatUtil {
    static skipHashCreateTypes: string[];
    static invalidNodeKey: string[];
    static AstNodeTypeConf: Map<string, {
        FEATURE_LEVEL: string[];
    }>;
    static validRuleListOfAstNodeType: {
        test: (astNode: AstFeatNode) => boolean;
        FEATURE_LEVEL: string[];
    }[];
    static isValidNodeCollect(astNode: AstFeatNode): boolean;
    static isValidArrayNodeCollect(astNode: AstFeatNode): boolean;
    static createHashSeed(nodeCollection: AstFeatNode[]): string;
    static createHashKey(str: string): string;
    static addAncestorForNode(nodeCollection: AstFeatNode[], ancestor: AstFeatNode): void;
    static deepFirstTravel(node: AstFeatNode, mapRecord: {
        mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet;
        nodeTypeSet: Set<string>;
    }, filePath: string): AstFeatNode | undefined;
    static _deepFirstTravel(node: AstFeatNode, visitedNodeSet: Set<typeof node>, mapRecord: {
        mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet;
        nodeTypeSet: Set<string>;
    }, filePath: string, depth: number): AstFeatNode;
    static updateLoc(astNode: AstFeatNode): void;
    static updateNodeSetOfDepth(mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet, rootNode: AstFeatNode, nodeCollection: AstFeatNode[]): void;
    static updateNodeSetByHashKey(hashKey: string, node: AstFeatNode, mapHashKeyToNode: MapHashKeyToAstNodeSet): void;
    static deleteSameSubSetPartial(mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet): void;
    static spreadSubNode(nodeSet: Set<AstFeatNode>, depthDiff: number): {
        rootNode: AstFeatNode;
        edgeNodeCollection: AstFeatNode[];
        commonHashKeyNodesOrdered: AstFeatNode[][];
    }[];
    static createNodeHashKey(nodeList: AstFeatNode[], mapHashKeyToString: Map<string, string>): string;
}
```

package/dist/cjs/util/ast_util/AstFeatUtil.js
ADDED (+330 lines)
```js
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/ast_util/AstFeatUtil.ts
var AstFeatUtil_exports = {};
__export(AstFeatUtil_exports, {
  default: () => AstFeatUtil
});
module.exports = __toCommonJS(AstFeatUtil_exports);
var import_lodash_es = require("lodash-es");
var import_sha1 = __toESM(require("crypto-js/sha1"));
var import_enc_base64 = __toESM(require("crypto-js/enc-base64"));
var import_featureLevel = require("../shared/featureLevel");
var import_astNodeFeatureExtractorMap = require("../shared/astNodeFeatureExtractorMap");
var AstFeatUtil = class {
  // single-node check
  static isValidNodeCollect(astNode) {
    var _a;
    const isTypeString = typeof (astNode == null ? void 0 : astNode.type) === "string";
    if (!isTypeString) {
      return false;
    }
    const ruleMatchAndPassed = this.validRuleListOfAstNodeType.find(({ test, FEATURE_LEVEL }) => {
      return FEATURE_LEVEL.includes(import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT) && test(astNode);
    });
    if (ruleMatchAndPassed) {
      return false;
    }
    if ((_a = this.AstNodeTypeConf.get(astNode.type)) == null ? void 0 : _a.FEATURE_LEVEL.includes(import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT)) {
      return false;
    }
    return true;
  }
  // array-of-nodes check
  static isValidArrayNodeCollect(astNode) {
    return Array.isArray(astNode) && astNode.some((v) => typeof (v == null ? void 0 : v.type) === "string");
  }
  static createHashSeed(nodeCollection) {
    const depthSet = /* @__PURE__ */ new Set();
    nodeCollection.forEach((n) => depthSet.add(n._util.depth));
    const astNodeFeatureExtractor = import_astNodeFeatureExtractorMap.astNodeFeatureExtractorMap.get(import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT);
    return nodeCollection.map((n) => astNodeFeatureExtractor(n)).join(":");
  }
  static createHashKey(str) {
    return import_enc_base64.default.stringify((0, import_sha1.default)(str));
  }
  static addAncestorForNode(nodeCollection, ancestor) {
    nodeCollection.forEach((nodeItem) => {
      nodeItem._util.ancestors.unshift(ancestor);
    });
  }
  static deepFirstTravel(node, mapRecord, filePath) {
    const visitedNodeSet = /* @__PURE__ */ new Set();
    if (!node) {
      return;
    }
    return this._deepFirstTravel(node, visitedNodeSet, mapRecord, filePath, 0);
  }
  static _deepFirstTravel(node, visitedNodeSet, mapRecord, filePath, depth) {
    visitedNodeSet.add(node);
    const { mapHashKeyToTopLevelNode, nodeTypeSet } = mapRecord;
    nodeTypeSet.add(node.type);
    const _util = {
      startLine: NaN,
      endLine: NaN,
      mapHashKeyToString: /* @__PURE__ */ new Map(),
      mapDepthDiffToHashKey: /* @__PURE__ */ new Map(),
      filePath,
      hashKey: "",
      nodeCollection: [],
      childrenNode: [],
      ancestors: [],
      depth
    };
    node._util = _util;
    const { nodeCollection, childrenNode } = _util;
    Object.keys(node).forEach((nodeKey) => {
      if (this.invalidNodeKey.includes(nodeKey)) {
        return;
      }
      const nodeValue = node[nodeKey];
      if (visitedNodeSet.has(nodeValue) || !node) {
        return;
      }
      if (this.isValidNodeCollect(nodeValue)) {
        const childNode = this._deepFirstTravel(nodeValue, visitedNodeSet, mapRecord, filePath, depth + 1);
        nodeCollection.push(childNode, ...childNode._util.nodeCollection);
        childrenNode.push(childNode);
      } else if (this.isValidArrayNodeCollect(nodeValue)) {
        const validNodeArray = nodeValue.filter((nodeItem) => this.isValidNodeCollect(nodeItem)).map((v) => {
          return this._deepFirstTravel(v, visitedNodeSet, mapRecord, filePath, depth + 1);
        });
        nodeCollection.push(...validNodeArray.map((n) => [n, ...n._util.nodeCollection]).flat());
        childrenNode.push(...validNodeArray);
      }
    });
    this.updateLoc(node);
    this.addAncestorForNode(nodeCollection, node);
    this.updateNodeSetOfDepth(mapHashKeyToTopLevelNode, node, nodeCollection);
    return node;
  }
  static updateLoc(astNode) {
    var _a, _b;
    const { _util } = astNode;
    const { nodeCollection } = _util;
    _util.startLine = Math.min(...nodeCollection.map((n) => {
      var _a2, _b2;
      return (_b2 = (_a2 = n.loc) == null ? void 0 : _a2.start) == null ? void 0 : _b2.line;
    }), (_a = astNode.loc) == null ? void 0 : _a.start.line);
    _util.endLine = Math.max(...nodeCollection.map((n) => {
      var _a2, _b2;
      return (_b2 = (_a2 = n.loc) == null ? void 0 : _a2.end) == null ? void 0 : _b2.line;
    }), (_b = astNode.loc) == null ? void 0 : _b.end.line);
  }
  static updateNodeSetOfDepth(mapHashKeyToTopLevelNode, rootNode, nodeCollection) {
    if (this.skipHashCreateTypes.includes(rootNode.type)) {
      return;
    }
    const { mapDepthDiffToHashKey, depth: baseDepth, mapHashKeyToString } = rootNode._util;
    const maxDepth = Math.max(...nodeCollection.map((n) => n._util.depth));
    if (!Number.isInteger(maxDepth)) {
      return;
    }
    let tmp = nodeCollection;
    for (let i = maxDepth; i > baseDepth; i--) {
      tmp = tmp.filter((n) => n._util.depth <= i);
      const hashKey = this.createNodeHashKey(tmp, mapHashKeyToString);
      const depthDiff = i - baseDepth;
      mapDepthDiffToHashKey.set(depthDiff, hashKey);
      this.updateNodeSetByHashKey(`${depthDiff}:${hashKey}`, rootNode, mapHashKeyToTopLevelNode);
    }
  }
  static updateNodeSetByHashKey(hashKey, node, mapHashKeyToNode) {
    const oldSet = mapHashKeyToNode.get(hashKey);
    if (oldSet) {
      oldSet.add(node);
    } else {
      mapHashKeyToNode.set(hashKey, /* @__PURE__ */ new Set([node]));
    }
    ;
  }
  static deleteSameSubSetPartial(mapHashKeyToTopLevelNode) {
    const shallowCopy = new Map(mapHashKeyToTopLevelNode);
    for (const [hashKey, nodeSet] of shallowCopy) {
      if (nodeSet.size > 1) {
        const depthDiff = Number(hashKey.split(":")[0]);
        const hasKeyList = [...nodeSet].map((item) => {
          const parent = [...item._util.ancestors].pop();
          if (parent) {
            const { mapDepthDiffToHashKey } = parent._util;
            const tmpHashKey = mapDepthDiffToHashKey.get(depthDiff + 1);
            return tmpHashKey;
          }
          return void 0;
        });
        const only = new Set(hasKeyList).size === 1 && hasKeyList[0] !== void 0;
        if (only) {
          mapHashKeyToTopLevelNode.delete(hashKey);
        }
      }
    }
  }
  static spreadSubNode(nodeSet, depthDiff) {
    const listOfMaxDepthDiff = [];
    const listOfNodeInfo = [...nodeSet].map((rootNode) => {
      const baseDepth = rootNode._util.depth;
      const edgeNodeCollection = rootNode._util.nodeCollection.filter((n) => n._util.depth - baseDepth === depthDiff);
      const maxItemDepthDiff = Math.max(...edgeNodeCollection.map((node) => node._util.depth - baseDepth));
      listOfMaxDepthDiff.push(maxItemDepthDiff);
      return {
        rootNode,
        edgeNodeCollection,
        commonHashKeyNodesOrdered: []
      };
    });
    const maxDepth = Math.min(...listOfMaxDepthDiff);
    let listOfCollection = listOfNodeInfo.map((e) => e.edgeNodeCollection);
    for (let i = 1; i <= maxDepth; i++) {
      const hashKeyList = listOfCollection.map((nodes) => nodes.map((node) => node._util.mapDepthDiffToHashKey.get(1)).filter(Boolean));
      const commonHashKeyList = (0, import_lodash_es.intersection)(...hashKeyList);
      listOfCollection = listOfCollection.map((nodes) => nodes.filter((node) => commonHashKeyList.includes(node._util.mapDepthDiffToHashKey.get(1))));
      if (listOfCollection.every((collection) => collection.length > 0)) {
        listOfCollection.forEach((col, index) => {
          listOfNodeInfo[index].commonHashKeyNodesOrdered.push(col);
        });
      }
    }
    return listOfNodeInfo;
  }
  static createNodeHashKey(nodeList, mapHashKeyToString) {
    const seed = this.createHashSeed(nodeList);
    const hashKey = this.createHashKey(seed);
    mapHashKeyToString.set(hashKey, seed);
    return hashKey;
  }
};
AstFeatUtil.skipHashCreateTypes = ["Program", "File", "ExportDeclaration"];
AstFeatUtil.invalidNodeKey = [
  "comments",
  "tokens"
];
// ignored node types
AstFeatUtil.AstNodeTypeConf = /* @__PURE__ */ new Map([
  ["ImportDeclaration", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT
    ]
  }],
  ["TypeAlias", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["VExpressionContainer", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["VStartTag", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE
    ]
  }],
  ["VEndTag", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE
    ]
  }],
  ["VText", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["VDocumentFragment", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["CommentLine", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT
    ]
  }],
  ["CommentBlock", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT
    ]
  }],
  ["JSXIdentifier", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["JSXAttribute", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["EmptyStatement", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["JSXText", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }],
  ["JSXClosingElement", {
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }]
]);
AstFeatUtil.validRuleListOfAstNodeType = [
  {
    test: (astNode) => /^TS\w+/.test(astNode.type),
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  },
  {
    test: (astNode) => /Literal$/.test(astNode.type),
    FEATURE_LEVEL: [
      import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
      import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
    ]
  }
];
```
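
`AstFeatUtil` fingerprints each eligible subtree by joining one feature string per collected node into a seed (`createHashSeed`) and hashing it with SHA-1, Base64-encoded (`createHashKey`); subtrees whose `depthDiff:hashKey` keys collide become duplicate candidates. A standalone sketch of that scheme — the feature strings below are invented examples, not real extractor output:

```js
// Sketch of the hash-key scheme used by createHashSeed/createHashKey above.
const sha1 = require("crypto-js/sha1");
const Base64 = require("crypto-js/enc-base64");

// createHashSeed joins one feature string per collected node with ":".
const seed = ["VariableDeclaration", "Identifier:total", "NumericLiteral:42"].join(":");

// createHashKey = Base64(SHA-1(seed)); equal seeds produce equal keys,
// so structurally identical subtrees land in the same Set.
const hashKey = Base64.stringify(sha1(seed));
console.log(hashKey);
```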

package/dist/cjs/util/ast_util/Core.d.ts
ADDED (+70 lines)
```ts
import { FileSegment, MapFilePathToDetail } from "./FileUtil";
import AstFeatUtil, { AstFeatNode, MapHashKeyToAstNodeSet } from "./AstFeatUtil";
export default class Core {
    static createMarkdownFile(result: {
        list: {
            content: string;
            location: string;
        }[];
        hashKey: string;
    }[]): string;
    static investigate(fileList: FileSegment[]): {
        errorList: string[];
        nodeTypeSet: Set<string>;
        nodeContentGroupList: {
            list: {
                astNode: AstFeatNode;
                location: string;
                content: string;
            }[];
            listOfNodeInfo: {
                rootNode: AstFeatNode;
                edgeNodeCollection: AstFeatNode[];
                commonHashKeyNodesOrdered: AstFeatNode[][];
            }[];
            nodeCount: number;
            depth: number;
            hashKey: string;
        }[];
        nodeGroupList: {
            list: AstFeatNode[];
            listOfNodeInfo: {
                rootNode: AstFeatNode;
                edgeNodeCollection: AstFeatNode[];
                commonHashKeyNodesOrdered: AstFeatNode[][];
            }[];
            nodeCount: number;
            depth: number;
            hashKey: string;
        }[];
        countList: number[];
        depthList: number[];
    };
    static getFileContentByLine(map: MapFilePathToDetail, filePath: string, start: number, end: number): {
        location: string;
        content: string;
    };
    static getListOfGroup(mapFilePathToDetail: MapFilePathToDetail, validFullNodeList: ReturnType<typeof Core.getListOfNodeGroup>['nodeGroupList']): {
        list: {
            astNode: AstFeatNode;
            location: string;
            content: string;
        }[];
        listOfNodeInfo: (typeof validFullNodeList)[number]["listOfNodeInfo"];
        nodeCount: number;
        depth: number;
        hashKey: string;
    }[];
    static createMapHashKeyToAstNodeSet(fileList: FileSegment[]): readonly [MapHashKeyToAstNodeSet, Set<string>, string[]];
    static getListOfNodeGroup(mapHashKeyToNodeSet: MapHashKeyToAstNodeSet): {
        nodeGroupList: {
            list: AstFeatNode[];
            listOfNodeInfo: ReturnType<typeof AstFeatUtil.spreadSubNode>;
            nodeCount: number;
            depth: number;
            hashKey: string;
        }[];
        countList: number[];
        depthList: number[];
    };
}
```

package/dist/cjs/util/ast_util/Core.js
ADDED (+193 lines)
```js
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/ast_util/Core.ts
var Core_exports = {};
__export(Core_exports, {
  default: () => Core
});
module.exports = __toCommonJS(Core_exports);
var import_lodash_es = require("lodash-es");
var import_FileUtil = __toESM(require("./FileUtil"));
var import_AstFeatUtil = __toESM(require("./AstFeatUtil"));
var Core = class {
  static createMarkdownFile(result) {
    let str = "# 共找到" + result.length + "组相似内容\n";
    result.forEach((item, i) => {
      const { list, hashKey } = item;
      const depth = hashKey.split(":")[0];
      str += `## 第${i + 1}组, 共${list.length}段, 匹配深度 ${depth}
`;
      list.forEach(({ content, location }, index) => {
        const filepath = location.split(/\s+/)[0];
        let ext = import_FileUtil.extensions.find((e) => filepath.includes(e));
        ext = (ext == null ? void 0 : ext.slice(1)) || "";
        str += `### 第${index + 1}段
> ${location}
\`\`\`${ext}
${content}
\`\`\`

`;
      });
    });
    return str;
  }
  static investigate(fileList) {
    const mapFilePathToDetail = import_FileUtil.default.createMapFilePathToDetail(fileList);
    const [mapHashKeyToTopLevelNode, nodeTypeSet, errorList] = this.createMapHashKeyToAstNodeSet(fileList);
    const { nodeGroupList } = this.getListOfNodeGroup(mapHashKeyToTopLevelNode);
    const nodeContentGroupList = this.getListOfGroup(mapFilePathToDetail, nodeGroupList);
    return {
      errorList,
      nodeTypeSet,
      nodeContentGroupList,
      nodeGroupList,
      countList: nodeContentGroupList.map((e) => e.nodeCount).sort((a, b) => a - b),
      depthList: nodeContentGroupList.map((e) => e.depth).sort((a, b) => a - b)
    };
  }
  static getFileContentByLine(map, filePath, start, end) {
    const fixedStart = Math.max(start - 1, 0);
    const content = map.get(filePath).lines.slice(fixedStart, end).join("\n");
    const location = `${filePath}: from line ${start} to line ${end}`;
    return { location, content };
  }
  static getListOfGroup(mapFilePathToDetail, validFullNodeList) {
    const listOfGroup = [];
    const locationStrSet = /* @__PURE__ */ new Set();
    for (const item of validFullNodeList) {
      const { listOfNodeInfo } = item;
      const newList = listOfNodeInfo.map(({ rootNode, edgeNodeCollection }) => {
        var _a;
        const { filePath, startLine, endLine } = rootNode._util;
        const linesNums = (0, import_lodash_es.range)(startLine, endLine + 1);
        const edgeNodesLines = edgeNodeCollection.map((edgeNode) => {
          const { startLine: itemStartLine, endLine: itemEndLine } = edgeNode._util;
          if (itemStartLine === startLine || itemEndLine === endLine) {
            return [];
          }
          return itemStartLine + 1 >= itemEndLine ? [] : (0, import_lodash_es.range)(itemStartLine + 1, itemEndLine);
        }).flat();
        const listOfRatedContent = [
          {
            linesNums: [],
            lines: [],
            rate: 0
          }
        ];
        const lines = ((_a = mapFilePathToDetail.get(filePath)) == null ? void 0 : _a.lines) || [];
        if (edgeNodesLines.length > 0) {
          for (let i = startLine; i <= endLine; i++) {
            const flag = edgeNodesLines.includes(i);
            const rate = flag ? 1 : 0;
            let lastRatedContent = listOfRatedContent[listOfRatedContent.length - 1];
            if (lastRatedContent.rate !== rate) {
              listOfRatedContent.push(lastRatedContent = {
                linesNums: [],
                lines: [],
                rate
              });
            }
            lastRatedContent.lines.push(lines[i - 1]);
            lastRatedContent.linesNums.push(i);
          }
        } else {
          listOfRatedContent.push({ linesNums, lines: lines.slice(Math.max(startLine - 1, 0), endLine), rate: 0 });
        }
        const { location, content } = this.getFileContentByLine(mapFilePathToDetail, filePath, startLine, endLine);
        return { location, content, astNode: rootNode, listOfRatedContent };
      });
      const locationStr = newList.map((e) => e.location).join();
      if (!locationStrSet.has(locationStr)) {
        locationStrSet.add(locationStr);
        const { nodeCount, depth, hashKey, listOfNodeInfo: listOfNodeInfo2 } = item;
        listOfGroup.push({
          nodeCount,
          depth,
          hashKey,
          listOfNodeInfo: listOfNodeInfo2,
          list: newList
        });
      }
    }
    return listOfGroup;
  }
  static createMapHashKeyToAstNodeSet(fileList) {
    const mapHashKeyToTopLevelNode = /* @__PURE__ */ new Map();
    const nodeTypeSet = /* @__PURE__ */ new Set();
    const errorList = [];
    fileList.forEach((file) => {
      const { filePath, fileContent } = file;
      const [errorMsg, parsedNode] = import_FileUtil.default.parseFile(filePath, fileContent);
      if (parsedNode) {
        AstUtil.deepFirstTravel(parsedNode, {
          mapHashKeyToTopLevelNode,
          nodeTypeSet
        }, filePath);
      } else if (errorMsg) {
        errorList.push(errorMsg);
      }
    });
    import_AstFeatUtil.default.deleteSameSubSetPartial(mapHashKeyToTopLevelNode);
    return [mapHashKeyToTopLevelNode, nodeTypeSet, errorList];
  }
  static getListOfNodeGroup(mapHashKeyToNodeSet) {
    const nodeGroupList = [];
    const countPartialSet = /* @__PURE__ */ new Set();
    const depthSet = /* @__PURE__ */ new Set();
    for (const [hashKey, nodeSet] of mapHashKeyToNodeSet) {
      if (nodeSet.size < 2) {
        continue;
      }
      const baseNode = [...nodeSet][0];
      let nodeCount = 0;
      const index = hashKey.indexOf(":");
      const depth = Number(hashKey.slice(0, index));
      depthSet.add(depth);
      const baseDepth = baseNode._util.depth;
      nodeCount = baseNode._util.nodeCollection.filter((n) => n._util.depth - baseDepth <= depth).length;
      countPartialSet.add(nodeCount);
      const listOfNodeInfo = import_AstFeatUtil.default.spreadSubNode(nodeSet, depth);
      nodeGroupList.push({
        list: [...nodeSet],
        listOfNodeInfo,
        nodeCount,
        depth,
        hashKey
      });
    }
    const countList = [...countPartialSet].sort((a, b) => a - b);
    const depthList = [...depthSet].sort((a, b) => a - b);
    return {
      nodeGroupList,
      countList,
      depthList
    };
  }
};
```
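
`Core.createMarkdownFile` only consumes each group's `hashKey` (for its `depth:` prefix) and the `list` of `{ location, content }` snippets, so the report rendering can be exercised on its own. A sketch with hand-written input; the deep `dist/cjs/...` require path and the hash value are assumptions, not documented entry points:

```js
// Sketch: rendering a duplicate-code report from a hand-built group.
const Core = require("js-code-detector/dist/cjs/util/ast_util/Core").default;

const groups = [{
  hashKey: "2:q2hhc2grZXk=", // "<depthDiff>:<hash>" — the hash here is made up
  list: [
    { location: "src/a.ts: from line 1 to line 3", content: "const add = (a, b) => a + b;" },
    { location: "src/b.ts: from line 7 to line 9", content: "const sum = (x, y) => x + y;" },
  ],
}];
console.log(Core.createMarkdownFile(groups));
```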

package/dist/cjs/util/ast_util/FileUtil.d.ts
CHANGED
```diff
@@ -1,6 +1,6 @@
 import * as babelParse from "@babel/parser";
 import * as vueParse from "vue-eslint-parser";
-import {
+import { AstFeatNode } from "./AstFeatUtil";
 export declare const extensionsOfJs: string[];
 export declare const extensions: string[];
 export type MapFilePathToDetail = Map<string, FileSegment & {
@@ -14,6 +14,6 @@ export default class FileUtil {
     static parseVue(filePath: string, fileContent: string): vueParse.AST.ESLintProgram;
     static parseJsxLike(filePath: string, fileContent: string): babelParse.ParseResult<import("@babel/types").File>;
     static createMapFilePathToDetail(list: FileSegment[]): MapFilePathToDetail;
-    static parseFile(filePath: string, fileContent: string): [string, Omit<
-    static getASTByFilePath(filePath: string): Omit<
+    static parseFile(filePath: string, fileContent: string): [string, Omit<AstFeatNode, '_util'> | null];
+    static getASTByFilePath(filePath: string): Omit<AstFeatNode, "_util"> | null;
 }
```

package/dist/cjs/util/report_util/createMdByJson.js
ADDED (+74 lines)
```js
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/report_util/createMdByJson.ts
var createMdByJson_exports = {};
__export(createMdByJson_exports, {
  createMdByJson: () => createMdByJson
});
module.exports = __toCommonJS(createMdByJson_exports);
var mapReportType = {
  modify: "修改",
  add: "新增",
  delete: "删除"
};
function createMdByJson(report) {
  return report.map(reportItemToMd).join("\n\n\n");
}
function reportItemToMd(report) {
  const { filePath, filesDependsOnMe, type, dangerIdentifiers, blockReports } = report;
  return [
    `## ${filePath}`,
    `- 类型: ${mapReportType[type]}`,
    filesDependsOnMe.length > 0 ? `- 依赖${filePath}的文件` : "",
    ...filesDependsOnMe.map((file, i) => `${i + 1}. ${file}`),
    dangerIdentifiers.length > 0 ? `- 重点检查使用的变量` : "",
    dangerIdentifiers.length > 0 ? `> ${dangerIdentifiers.join(", ")}` : "",
    blockReports.length > 0 ? `### 代码块分析` : "",
    ...blockReports.map(blockReportToMd)
  ].filter(Boolean).join("\n\n");
}
function blockReportToMd(block) {
  const {
    kind,
    diff_txt,
    added,
    addedEffects,
    removed,
    removedEffects
  } = block;
  return [
    `#### 修改分类: ${kind}`,
    `- 原始diff内容`,
    `\`\`\`txt
${diff_txt.join("\n")}
\`\`\``,
    added.length > 0 ? `- 新增标识符` : "",
    added.length > 0 ? `> ${added.join(", ")}` : "",
    addedEffects.length > 0 ? `- 新增标识符影响` : "",
    addedEffects.map(({ causeBy, effects }) => `> ${causeBy}相关: ${effects.join()}`).join("\n\n"),
    removed.length > 0 ? `- 删除标识符` : "",
    removed.length > 0 ? `> ${removed.join(", ")}` : "",
    removedEffects.length > 0 ? `- 删除标识符影响` : "",
    removedEffects.map(({ causeBy, effects }) => `> ${causeBy}相关: ${effects.join()}`).join("\n\n")
  ].filter(Boolean).join("\n\n");
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  createMdByJson
});
```

package/dist/cjs/util/report_util.d.ts
CHANGED
```diff
@@ -1,4 +1,12 @@
 import { GitDiffDetail } from "./format_git_diff_content";
+import { BlockReport } from "./report_util/code_block_detect";
+export type DetectReport = {
+    filePath: string;
+    type: "modify" | "add" | "delete";
+    filesDependsOnMe: string[];
+    dangerIdentifiers: string[];
+    blockReports: BlockReport[];
+};
 type Arg = {
     groupGitDiffLines: GitDiffDetail[];
     tree: Record<string, string[]>;
```
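
Together with the `DetectReport` type declared above, the renderer added in `createMdByJson.js` can be driven directly. A sketch with a minimal hand-written report — the deep `dist/cjs/...` require path is an assumption, and `blockReports` is left empty for brevity:

```js
// Sketch: turning a DetectReport array into the git-diff Markdown report.
const { createMdByJson } = require("js-code-detector/dist/cjs/util/report_util/createMdByJson");

const reports = [{
  filePath: "src/pages/home.ts",
  type: "modify",
  filesDependsOnMe: ["src/app.ts", "src/routes.ts"],
  dangerIdentifiers: ["fetchUser"],
  blockReports: [], // BlockReport entries omitted in this sketch
}];
console.log(createMdByJson(reports));
```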

package/dist/cjs/util/shared/astNodeFeatureExtractorMap.js
ADDED (+59 lines)
```js
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/shared/astNodeFeatureExtractorMap.ts
var astNodeFeatureExtractorMap_exports = {};
__export(astNodeFeatureExtractorMap_exports, {
  astNodeFeatureExtractorMap: () => astNodeFeatureExtractorMap
});
module.exports = __toCommonJS(astNodeFeatureExtractorMap_exports);
var import_featureLevel = require("./featureLevel");
var astNodeFeatureExtractorMap = /* @__PURE__ */ new Map([
  [
    import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
    (a) => a.type
  ],
  [
    import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
    (n) => {
      if (typeof n.name === "object") {
        return n.type + ":" + n.name.name;
      } else if (typeof n.name === "string") {
        return n.type + ":" + n.name;
      }
      return n.type;
    }
  ],
  [
    import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT,
    (n) => {
      if (typeof n.name === "object") {
        return n.type + ":" + n.name.name;
      } else if (typeof n.name === "string") {
        return n.type + ":" + n.name;
      } else if (n.extra && "raw" in n.extra) {
        return n.type + ":" + n.extra.raw;
      }
      return n.type;
    }
  ]
]);
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  astNodeFeatureExtractorMap
});
```
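
The three feature levels trade precision for recall: `LOOSE` keys on the node type alone, `NORMAL` adds the identifier name, and `EXACT` also folds in a literal's raw value. A quick sketch of what each extractor returns for two hand-made node objects (not real Babel output); the deep require paths are assumptions:

```js
// Sketch: feature strings produced at each level for minimal fake nodes.
const { astNodeFeatureExtractorMap } = require("js-code-detector/dist/cjs/util/shared/astNodeFeatureExtractorMap");
const { AST_NODE_FEATURE_LEVEL } = require("js-code-detector/dist/cjs/util/shared/featureLevel");

const literal = { type: "NumericLiteral", extra: { raw: "42" } };
const ident = { type: "Identifier", name: "total" };

console.log(astNodeFeatureExtractorMap.get(AST_NODE_FEATURE_LEVEL.LOOSE)(literal));  // "NumericLiteral"
console.log(astNodeFeatureExtractorMap.get(AST_NODE_FEATURE_LEVEL.NORMAL)(literal)); // "NumericLiteral"
console.log(astNodeFeatureExtractorMap.get(AST_NODE_FEATURE_LEVEL.EXACT)(literal));  // "NumericLiteral:42"
console.log(astNodeFeatureExtractorMap.get(AST_NODE_FEATURE_LEVEL.EXACT)(ident));    // "Identifier:total"
```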

package/dist/cjs/util/shared/featureLevel.js
ADDED (+33 lines)
```js
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/shared/featureLevel.ts
var featureLevel_exports = {};
__export(featureLevel_exports, {
  AST_NODE_FEATURE_LEVEL: () => AST_NODE_FEATURE_LEVEL
});
module.exports = __toCommonJS(featureLevel_exports);
var AST_NODE_FEATURE_LEVEL = {
  LOOSE: "LOOSE",
  NORMAL: "NORMAL",
  EXACT: "EXACT"
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  AST_NODE_FEATURE_LEVEL
});
```

package/dist/cjs/util/shared/readDirFiles.js
ADDED (+45 lines)
```js
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/shared/readDirFiles.ts
var readDirFiles_exports = {};
__export(readDirFiles_exports, {
  readSrcFiles: () => readSrcFiles
});
module.exports = __toCommonJS(readDirFiles_exports);
var import_fs = require("fs");
var import_utils = require("@umijs/utils");
var import_path = require("path");
var exclude = [/node_modules/, /\.d\.ts$/, /\.umi/];
function readSrcFiles(dirOfCwd) {
  const dir = (0, import_path.join)(process.cwd(), dirOfCwd || "src");
  const fileItems = (0, import_utils.readDirFiles)({
    dir,
    exclude
  });
  return Promise.all(fileItems.map((item) => {
    return {
      filePath: item.filePath,
      fileContent: (0, import_fs.readFileSync)(item.filePath, "utf-8")
    };
  }));
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  readSrcFiles
});
```

package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "js-code-detector",
-  "version": "0.0.
+  "version": "0.0.5",
   "description": "",
   "main": "dist/cjs/index.js",
   "types": "dist/cjs/index.d.ts",
@@ -21,19 +21,24 @@
     "bin"
   ],
   "bin": {
-    "detect": "bin/detect.js"
+    "detect": "bin/detect.js",
+    "sameCodeDetect": "bin/sameCodeDetect.js"
   },
   "publishConfig": {
     "access": "public"
   },
   "devDependencies": {
+    "@types/crypto-js": "^4.2.2",
+    "@types/lodash-es": "^4.17.12",
     "@types/madge": "^5.0.3",
     "father": "^4.6.3"
   },
   "dependencies": {
     "@babel/parser": "^7.28.3",
     "@umijs/utils": "^4.4.12",
+    "crypto-js": "^4.2.0",
     "dayjs": "^1.11.13",
+    "lodash-es": "^4.17.21",
     "madge": "^8.0.0",
     "vue-eslint-parser": "^10.2.0"
   }
```

package/dist/cjs/util/ast_parser.d.ts
File without changes

package/dist/cjs/util/ast_parser.js
File without changes