js-code-detector 0.0.4 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,4 +1,4 @@
1
- # code-detector
1
+ # js-code-detector
2
2
 
3
3
  [![NPM version](https://img.shields.io/npm/v/code-detector.svg?style=flat)](https://npmjs.com/package/code-detector)
4
4
  [![NPM downloads](http://img.shields.io/npm/dm/code-detector.svg?style=flat)](https://npmjs.com/package/code-detector)
@@ -0,0 +1,4 @@
1
+ #!/usr/bin/env node
2
+
3
+ const args = process.argv;
4
+ require('../dist/cjs/index').sameCodeDetect(args[2]);
@@ -5,3 +5,4 @@ export declare function getGitRepositoryAndBranch(): Promise<{
5
5
  branchName: string;
6
6
  }>;
7
7
  export declare function generateReport(jsonStr: string): void;
8
+ export declare function sameCodeDetect(dirOfCwd?: string): Promise<void>;
package/dist/cjs/index.js CHANGED
@@ -31,6 +31,7 @@ var src_exports = {};
31
31
  __export(src_exports, {
32
32
  generateReport: () => generateReport,
33
33
  getGitRepositoryAndBranch: () => getGitRepositoryAndBranch,
34
+ sameCodeDetect: () => sameCodeDetect,
34
35
  umiPluginCallback: () => umiPluginCallback,
35
36
  writeGitDiffTxt: () => writeGitDiffTxt
36
37
  });
@@ -44,6 +45,8 @@ var import_report_util = require("./util/report_util");
44
45
  var import_utils = require("@umijs/utils");
45
46
  var import_dayjs = __toESM(require("dayjs"));
46
47
  var import_createMdByJson = require("./util/report_util/createMdByJson");
48
+ var import_readDirFiles = require("./util/shared/readDirFiles");
49
+ var import_Core = __toESM(require("./util/ast_util/Core"));
47
50
  var jsonName = "git_diff_report.md";
48
51
  async function umiPluginCallback(api) {
49
52
  const diff_txt = (0, import_fs.readFileSync)((0, import_path.join)(api.cwd, "git_diff.txt"), "utf-8");
@@ -108,10 +111,17 @@ async function getGitRepositoryAndBranch() {
108
111
  function generateReport(jsonStr) {
109
112
  (0, import_fs2.writeFileSync)((0, import_path.join)(process.cwd(), `${(0, import_dayjs.default)().format("YYYYMDD_HHmm")}_${jsonName}`), jsonStr, { encoding: "utf-8", flag: "w" });
110
113
  }
114
+ async function sameCodeDetect(dirOfCwd) {
115
+ const filesAndContent = await (0, import_readDirFiles.readSrcFiles)(dirOfCwd);
116
+ const { nodeContentGroupList } = import_Core.default.investigate(filesAndContent);
117
+ const md = import_Core.default.createMarkdownFile(nodeContentGroupList.slice(0, 5));
118
+ (0, import_fs2.writeFileSync)((0, import_path.join)(process.cwd(), `${(0, import_dayjs.default)().format("YYYYMDD_HHmm")}_same_code.md`), md, { encoding: "utf-8", flag: "w" });
119
+ }
111
120
  // Annotate the CommonJS export names for ESM import in node:
112
121
  0 && (module.exports = {
113
122
  generateReport,
114
123
  getGitRepositoryAndBranch,
124
+ sameCodeDetect,
115
125
  umiPluginCallback,
116
126
  writeGitDiffTxt
117
127
  });
@@ -0,0 +1,63 @@
1
+ import { Comment, Node, Identifier } from "@babel/types";
2
+ export type MapHashKeyToAstNodeSet = Map<string, Set<AstFeatNode>>;
3
+ export interface AstFeatNode {
4
+ type: Node["type"];
5
+ name?: Identifier | string;
6
+ leadingComments?: Comment[] | null;
7
+ innerComments?: Comment[] | null;
8
+ trailingComments?: Comment[] | null;
9
+ start?: number | null;
10
+ end?: number | null;
11
+ loc?: Record<'start' | 'end', {
12
+ line: number;
13
+ }> | null;
14
+ range?: [number, number];
15
+ extra?: Record<string, unknown>;
16
+ _util: {
17
+ startLine: number;
18
+ endLine: number;
19
+ mapHashKeyToString: Map<string, string>;
20
+ mapDepthDiffToHashKey: Map<number, string>;
21
+ filePath: string;
22
+ hashKey: string;
23
+ nodeCollection: AstFeatNode[];
24
+ childrenNode: AstFeatNode[];
25
+ ancestors: AstFeatNode[];
26
+ depth: number;
27
+ };
28
+ [p: string]: any;
29
+ }
30
+ export default class AstFeatUtil {
31
+ static skipHashCreateTypes: string[];
32
+ static invalidNodeKey: string[];
33
+ static AstNodeTypeConf: Map<string, {
34
+ FEATURE_LEVEL: string[];
35
+ }>;
36
+ static validRuleListOfAstNodeType: {
37
+ test: (astNode: AstFeatNode) => boolean;
38
+ FEATURE_LEVEL: string[];
39
+ }[];
40
+ static isValidNodeCollect(astNode: AstFeatNode): boolean;
41
+ static isValidArrayNodeCollect(astNode: AstFeatNode): boolean;
42
+ static createHashSeed(nodeCollection: AstFeatNode[]): string;
43
+ static createHashKey(str: string): string;
44
+ static addAncestorForNode(nodeCollection: AstFeatNode[], ancestor: AstFeatNode): void;
45
+ static deepFirstTravel(node: AstFeatNode, mapRecord: {
46
+ mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet;
47
+ nodeTypeSet: Set<string>;
48
+ }, filePath: string): AstFeatNode | undefined;
49
+ static _deepFirstTravel(node: AstFeatNode, visitedNodeSet: Set<typeof node>, mapRecord: {
50
+ mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet;
51
+ nodeTypeSet: Set<string>;
52
+ }, filePath: string, depth: number): AstFeatNode;
53
+ static updateLoc(astNode: AstFeatNode): void;
54
+ static updateNodeSetOfDepth(mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet, rootNode: AstFeatNode, nodeCollection: AstFeatNode[]): void;
55
+ static updateNodeSetByHashKey(hashKey: string, node: AstFeatNode, mapHashKeyToNode: MapHashKeyToAstNodeSet): void;
56
+ static deleteSameSubSetPartial(mapHashKeyToTopLevelNode: MapHashKeyToAstNodeSet): void;
57
+ static spreadSubNode(nodeSet: Set<AstFeatNode>, depthDiff: number): {
58
+ rootNode: AstFeatNode;
59
+ edgeNodeCollection: AstFeatNode[];
60
+ commonHashKeyNodesOrdered: AstFeatNode[][];
61
+ }[];
62
+ static createNodeHashKey(nodeList: AstFeatNode[], mapHashKeyToString: Map<string, string>): string;
63
+ }
@@ -0,0 +1,330 @@
1
+ var __create = Object.create;
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __getProtoOf = Object.getPrototypeOf;
6
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
7
+ var __export = (target, all) => {
8
+ for (var name in all)
9
+ __defProp(target, name, { get: all[name], enumerable: true });
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
20
+ // If the importer is in node compatibility mode or this is not an ESM
21
+ // file that has been converted to a CommonJS file using a Babel-
22
+ // compatible transform (i.e. "__esModule" has not been set), then set
23
+ // "default" to the CommonJS "module.exports" for node compatibility.
24
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
25
+ mod
26
+ ));
27
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
28
+
29
+ // src/util/ast_util/AstFeatUtil.ts
30
+ var AstFeatUtil_exports = {};
31
+ __export(AstFeatUtil_exports, {
32
+ default: () => AstFeatUtil
33
+ });
34
+ module.exports = __toCommonJS(AstFeatUtil_exports);
35
+ var import_lodash_es = require("lodash-es");
36
+ var import_sha1 = __toESM(require("crypto-js/sha1"));
37
+ var import_enc_base64 = __toESM(require("crypto-js/enc-base64"));
38
+ var import_featureLevel = require("../shared/featureLevel");
39
+ var import_astNodeFeatureExtractorMap = require("../shared/astNodeFeatureExtractorMap");
40
+ var AstFeatUtil = class {
41
+ // 单节点判断
42
+ static isValidNodeCollect(astNode) {
43
+ var _a;
44
+ const isTypeString = typeof (astNode == null ? void 0 : astNode.type) === "string";
45
+ if (!isTypeString) {
46
+ return false;
47
+ }
48
+ const ruleMatchAndPassed = this.validRuleListOfAstNodeType.find(({ test, FEATURE_LEVEL }) => {
49
+ return FEATURE_LEVEL.includes(import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT) && test(astNode);
50
+ });
51
+ if (ruleMatchAndPassed) {
52
+ return false;
53
+ }
54
+ if ((_a = this.AstNodeTypeConf.get(astNode.type)) == null ? void 0 : _a.FEATURE_LEVEL.includes(import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT)) {
55
+ return false;
56
+ }
57
+ return true;
58
+ }
59
+ // 数组节点 判断
60
+ static isValidArrayNodeCollect(astNode) {
61
+ return Array.isArray(astNode) && astNode.some((v) => typeof (v == null ? void 0 : v.type) === "string");
62
+ }
63
+ static createHashSeed(nodeCollection) {
64
+ const depthSet = /* @__PURE__ */ new Set();
65
+ nodeCollection.forEach((n) => depthSet.add(n._util.depth));
66
+ const astNodeFeatureExtractor = import_astNodeFeatureExtractorMap.astNodeFeatureExtractorMap.get(import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT);
67
+ return nodeCollection.map((n) => astNodeFeatureExtractor(n)).join(":");
68
+ }
69
+ static createHashKey(str) {
70
+ return import_enc_base64.default.stringify((0, import_sha1.default)(str));
71
+ }
72
+ static addAncestorForNode(nodeCollection, ancestor) {
73
+ nodeCollection.forEach((nodeItem) => {
74
+ nodeItem._util.ancestors.unshift(ancestor);
75
+ });
76
+ }
77
+ static deepFirstTravel(node, mapRecord, filePath) {
78
+ const visitedNodeSet = /* @__PURE__ */ new Set();
79
+ if (!node) {
80
+ return;
81
+ }
82
+ return this._deepFirstTravel(node, visitedNodeSet, mapRecord, filePath, 0);
83
+ }
84
+ static _deepFirstTravel(node, visitedNodeSet, mapRecord, filePath, depth) {
85
+ visitedNodeSet.add(node);
86
+ const { mapHashKeyToTopLevelNode, nodeTypeSet } = mapRecord;
87
+ nodeTypeSet.add(node.type);
88
+ const _util = {
89
+ startLine: NaN,
90
+ endLine: NaN,
91
+ mapHashKeyToString: /* @__PURE__ */ new Map(),
92
+ mapDepthDiffToHashKey: /* @__PURE__ */ new Map(),
93
+ filePath,
94
+ hashKey: "",
95
+ nodeCollection: [],
96
+ childrenNode: [],
97
+ ancestors: [],
98
+ depth
99
+ };
100
+ node._util = _util;
101
+ const { nodeCollection, childrenNode } = _util;
102
+ Object.keys(node).forEach((nodeKey) => {
103
+ if (this.invalidNodeKey.includes(nodeKey)) {
104
+ return;
105
+ }
106
+ const nodeValue = node[nodeKey];
107
+ if (visitedNodeSet.has(nodeValue) || !node) {
108
+ return;
109
+ }
110
+ if (this.isValidNodeCollect(nodeValue)) {
111
+ const childNode = this._deepFirstTravel(nodeValue, visitedNodeSet, mapRecord, filePath, depth + 1);
112
+ nodeCollection.push(childNode, ...childNode._util.nodeCollection);
113
+ childrenNode.push(childNode);
114
+ } else if (this.isValidArrayNodeCollect(nodeValue)) {
115
+ const validNodeArray = nodeValue.filter((nodeItem) => this.isValidNodeCollect(nodeItem)).map((v) => {
116
+ return this._deepFirstTravel(v, visitedNodeSet, mapRecord, filePath, depth + 1);
117
+ });
118
+ nodeCollection.push(...validNodeArray.map((n) => [n, ...n._util.nodeCollection]).flat());
119
+ childrenNode.push(...validNodeArray);
120
+ }
121
+ });
122
+ this.updateLoc(node);
123
+ this.addAncestorForNode(nodeCollection, node);
124
+ this.updateNodeSetOfDepth(mapHashKeyToTopLevelNode, node, nodeCollection);
125
+ return node;
126
+ }
127
+ static updateLoc(astNode) {
128
+ var _a, _b;
129
+ const { _util } = astNode;
130
+ const { nodeCollection } = _util;
131
+ _util.startLine = Math.min(...nodeCollection.map((n) => {
132
+ var _a2, _b2;
133
+ return (_b2 = (_a2 = n.loc) == null ? void 0 : _a2.start) == null ? void 0 : _b2.line;
134
+ }), (_a = astNode.loc) == null ? void 0 : _a.start.line);
135
+ _util.endLine = Math.max(...nodeCollection.map((n) => {
136
+ var _a2, _b2;
137
+ return (_b2 = (_a2 = n.loc) == null ? void 0 : _a2.end) == null ? void 0 : _b2.line;
138
+ }), (_b = astNode.loc) == null ? void 0 : _b.end.line);
139
+ }
140
+ static updateNodeSetOfDepth(mapHashKeyToTopLevelNode, rootNode, nodeCollection) {
141
+ if (this.skipHashCreateTypes.includes(rootNode.type)) {
142
+ return;
143
+ }
144
+ const { mapDepthDiffToHashKey, depth: baseDepth, mapHashKeyToString } = rootNode._util;
145
+ const maxDepth = Math.max(...nodeCollection.map((n) => n._util.depth));
146
+ if (!Number.isInteger(maxDepth)) {
147
+ return;
148
+ }
149
+ let tmp = nodeCollection;
150
+ for (let i = maxDepth; i > baseDepth; i--) {
151
+ tmp = tmp.filter((n) => n._util.depth <= i);
152
+ const hashKey = this.createNodeHashKey(tmp, mapHashKeyToString);
153
+ const depthDiff = i - baseDepth;
154
+ mapDepthDiffToHashKey.set(depthDiff, hashKey);
155
+ this.updateNodeSetByHashKey(`${depthDiff}:${hashKey}`, rootNode, mapHashKeyToTopLevelNode);
156
+ }
157
+ }
158
+ static updateNodeSetByHashKey(hashKey, node, mapHashKeyToNode) {
159
+ const oldSet = mapHashKeyToNode.get(hashKey);
160
+ if (oldSet) {
161
+ oldSet.add(node);
162
+ } else {
163
+ mapHashKeyToNode.set(hashKey, /* @__PURE__ */ new Set([node]));
164
+ }
165
+ ;
166
+ }
167
+ static deleteSameSubSetPartial(mapHashKeyToTopLevelNode) {
168
+ const shallowCopy = new Map(mapHashKeyToTopLevelNode);
169
+ for (const [hashKey, nodeSet] of shallowCopy) {
170
+ if (nodeSet.size > 1) {
171
+ const depthDiff = Number(hashKey.split(":")[0]);
172
+ const hasKeyList = [...nodeSet].map((item) => {
173
+ const parent = [...item._util.ancestors].pop();
174
+ if (parent) {
175
+ const { mapDepthDiffToHashKey } = parent._util;
176
+ const tmpHashKey = mapDepthDiffToHashKey.get(depthDiff + 1);
177
+ return tmpHashKey;
178
+ }
179
+ return void 0;
180
+ });
181
+ const only = new Set(hasKeyList).size === 1 && hasKeyList[0] !== void 0;
182
+ if (only) {
183
+ mapHashKeyToTopLevelNode.delete(hashKey);
184
+ }
185
+ }
186
+ }
187
+ }
188
+ static spreadSubNode(nodeSet, depthDiff) {
189
+ const listOfMaxDepthDiff = [];
190
+ const listOfNodeInfo = [...nodeSet].map((rootNode) => {
191
+ const baseDepth = rootNode._util.depth;
192
+ const edgeNodeCollection = rootNode._util.nodeCollection.filter((n) => n._util.depth - baseDepth === depthDiff);
193
+ const maxItemDepthDiff = Math.max(...edgeNodeCollection.map((node) => node._util.depth - baseDepth));
194
+ listOfMaxDepthDiff.push(maxItemDepthDiff);
195
+ return {
196
+ rootNode,
197
+ edgeNodeCollection,
198
+ commonHashKeyNodesOrdered: []
199
+ };
200
+ });
201
+ const maxDepth = Math.min(...listOfMaxDepthDiff);
202
+ let listOfCollection = listOfNodeInfo.map((e) => e.edgeNodeCollection);
203
+ for (let i = 1; i <= maxDepth; i++) {
204
+ const hashKeyList = listOfCollection.map((nodes) => nodes.map((node) => node._util.mapDepthDiffToHashKey.get(1)).filter(Boolean));
205
+ const commonHashKeyList = (0, import_lodash_es.intersection)(...hashKeyList);
206
+ listOfCollection = listOfCollection.map((nodes) => nodes.filter((node) => commonHashKeyList.includes(node._util.mapDepthDiffToHashKey.get(1))));
207
+ if (listOfCollection.every((collection) => collection.length > 0)) {
208
+ listOfCollection.forEach((col, index) => {
209
+ listOfNodeInfo[index].commonHashKeyNodesOrdered.push(col);
210
+ });
211
+ }
212
+ }
213
+ return listOfNodeInfo;
214
+ }
215
+ static createNodeHashKey(nodeList, mapHashKeyToString) {
216
+ const seed = this.createHashSeed(nodeList);
217
+ const hashKey = this.createHashKey(seed);
218
+ mapHashKeyToString.set(hashKey, seed);
219
+ return hashKey;
220
+ }
221
+ };
222
+ AstFeatUtil.skipHashCreateTypes = ["Program", "File", "ExportDeclaration"];
223
+ AstFeatUtil.invalidNodeKey = [
224
+ "comments",
225
+ "tokens"
226
+ ];
227
+ // 忽略的 节点类型
228
+ AstFeatUtil.AstNodeTypeConf = /* @__PURE__ */ new Map([
229
+ ["ImportDeclaration", {
230
+ FEATURE_LEVEL: [
231
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
232
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
233
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT
234
+ ]
235
+ }],
236
+ ["TypeAlias", {
237
+ FEATURE_LEVEL: [
238
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
239
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
240
+ ]
241
+ }],
242
+ ["VExpressionContainer", {
243
+ FEATURE_LEVEL: [
244
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
245
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
246
+ ]
247
+ }],
248
+ ["VStartTag", {
249
+ FEATURE_LEVEL: [
250
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE
251
+ ]
252
+ }],
253
+ ["VEndTag", {
254
+ FEATURE_LEVEL: [
255
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE
256
+ ]
257
+ }],
258
+ ["VText", {
259
+ FEATURE_LEVEL: [
260
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
261
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
262
+ ]
263
+ }],
264
+ ["VDocumentFragment", {
265
+ FEATURE_LEVEL: [
266
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
267
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
268
+ ]
269
+ }],
270
+ ["CommentLine", {
271
+ FEATURE_LEVEL: [
272
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
273
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
274
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT
275
+ ]
276
+ }],
277
+ ["CommentBlock", {
278
+ FEATURE_LEVEL: [
279
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
280
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
281
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT
282
+ ]
283
+ }],
284
+ ["JSXIdentifier", {
285
+ FEATURE_LEVEL: [
286
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
287
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
288
+ ]
289
+ }],
290
+ ["JSXAttribute", {
291
+ FEATURE_LEVEL: [
292
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
293
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
294
+ ]
295
+ }],
296
+ ["EmptyStatement", {
297
+ FEATURE_LEVEL: [
298
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
299
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
300
+ ]
301
+ }],
302
+ ["JSXText", {
303
+ FEATURE_LEVEL: [
304
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
305
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
306
+ ]
307
+ }],
308
+ ["JSXClosingElement", {
309
+ FEATURE_LEVEL: [
310
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
311
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
312
+ ]
313
+ }]
314
+ ]);
315
+ AstFeatUtil.validRuleListOfAstNodeType = [
316
+ {
317
+ test: (astNode) => /^TS\w+/.test(astNode.type),
318
+ FEATURE_LEVEL: [
319
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
320
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
321
+ ]
322
+ },
323
+ {
324
+ test: (astNode) => /Literal$/.test(astNode.type),
325
+ FEATURE_LEVEL: [
326
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
327
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL
328
+ ]
329
+ }
330
+ ];
@@ -0,0 +1,70 @@
1
+ import { FileSegment, MapFilePathToDetail } from "./FileUtil";
2
+ import AstFeatUtil, { AstFeatNode, MapHashKeyToAstNodeSet } from "./AstFeatUtil";
3
+ export default class Core {
4
+ static createMarkdownFile(result: {
5
+ list: {
6
+ content: string;
7
+ location: string;
8
+ }[];
9
+ hashKey: string;
10
+ }[]): string;
11
+ static investigate(fileList: FileSegment[]): {
12
+ errorList: string[];
13
+ nodeTypeSet: Set<string>;
14
+ nodeContentGroupList: {
15
+ list: {
16
+ astNode: AstFeatNode;
17
+ location: string;
18
+ content: string;
19
+ }[];
20
+ listOfNodeInfo: {
21
+ rootNode: AstFeatNode;
22
+ edgeNodeCollection: AstFeatNode[];
23
+ commonHashKeyNodesOrdered: AstFeatNode[][];
24
+ }[];
25
+ nodeCount: number;
26
+ depth: number;
27
+ hashKey: string;
28
+ }[];
29
+ nodeGroupList: {
30
+ list: AstFeatNode[];
31
+ listOfNodeInfo: {
32
+ rootNode: AstFeatNode;
33
+ edgeNodeCollection: AstFeatNode[];
34
+ commonHashKeyNodesOrdered: AstFeatNode[][];
35
+ }[];
36
+ nodeCount: number;
37
+ depth: number;
38
+ hashKey: string;
39
+ }[];
40
+ countList: number[];
41
+ depthList: number[];
42
+ };
43
+ static getFileContentByLine(map: MapFilePathToDetail, filePath: string, start: number, end: number): {
44
+ location: string;
45
+ content: string;
46
+ };
47
+ static getListOfGroup(mapFilePathToDetail: MapFilePathToDetail, validFullNodeList: ReturnType<typeof Core.getListOfNodeGroup>['nodeGroupList']): {
48
+ list: {
49
+ astNode: AstFeatNode;
50
+ location: string;
51
+ content: string;
52
+ }[];
53
+ listOfNodeInfo: (typeof validFullNodeList)[number]["listOfNodeInfo"];
54
+ nodeCount: number;
55
+ depth: number;
56
+ hashKey: string;
57
+ }[];
58
+ static createMapHashKeyToAstNodeSet(fileList: FileSegment[]): readonly [MapHashKeyToAstNodeSet, Set<string>, string[]];
59
+ static getListOfNodeGroup(mapHashKeyToNodeSet: MapHashKeyToAstNodeSet): {
60
+ nodeGroupList: {
61
+ list: AstFeatNode[];
62
+ listOfNodeInfo: ReturnType<typeof AstFeatUtil.spreadSubNode>;
63
+ nodeCount: number;
64
+ depth: number;
65
+ hashKey: string;
66
+ }[];
67
+ countList: number[];
68
+ depthList: number[];
69
+ };
70
+ }
@@ -0,0 +1,193 @@
1
+ var __create = Object.create;
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __getProtoOf = Object.getPrototypeOf;
6
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
7
+ var __export = (target, all) => {
8
+ for (var name in all)
9
+ __defProp(target, name, { get: all[name], enumerable: true });
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
20
+ // If the importer is in node compatibility mode or this is not an ESM
21
+ // file that has been converted to a CommonJS file using a Babel-
22
+ // compatible transform (i.e. "__esModule" has not been set), then set
23
+ // "default" to the CommonJS "module.exports" for node compatibility.
24
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
25
+ mod
26
+ ));
27
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
28
+
29
+ // src/util/ast_util/Core.ts
30
+ var Core_exports = {};
31
+ __export(Core_exports, {
32
+ default: () => Core
33
+ });
34
+ module.exports = __toCommonJS(Core_exports);
35
+ var import_lodash_es = require("lodash-es");
36
+ var import_FileUtil = __toESM(require("./FileUtil"));
37
+ var import_AstFeatUtil = __toESM(require("./AstFeatUtil"));
38
+ var Core = class {
39
+ static createMarkdownFile(result) {
40
+ let str = "# 共找到" + result.length + "组相似内容\n";
41
+ result.forEach((item, i) => {
42
+ const { list, hashKey } = item;
43
+ const depth = hashKey.split(":")[0];
44
+ str += `## 第${i + 1}组, 共${list.length}段, 匹配深度 ${depth}
45
+ `;
46
+ list.forEach(({ content, location }, index) => {
47
+ const filepath = location.split(/\s+/)[0];
48
+ let ext = import_FileUtil.extensions.find((e) => filepath.includes(e));
49
+ ext = (ext == null ? void 0 : ext.slice(1)) || "";
50
+ str += `### 第${index + 1}段
51
+ > ${location}
52
+ \`\`\`${ext}
53
+ ${content}
54
+ \`\`\`
55
+
56
+ `;
57
+ });
58
+ });
59
+ return str;
60
+ }
61
+ static investigate(fileList) {
62
+ const mapFilePathToDetail = import_FileUtil.default.createMapFilePathToDetail(fileList);
63
+ const [mapHashKeyToTopLevelNode, nodeTypeSet, errorList] = this.createMapHashKeyToAstNodeSet(fileList);
64
+ const { nodeGroupList } = this.getListOfNodeGroup(mapHashKeyToTopLevelNode);
65
+ const nodeContentGroupList = this.getListOfGroup(mapFilePathToDetail, nodeGroupList);
66
+ return {
67
+ errorList,
68
+ nodeTypeSet,
69
+ nodeContentGroupList,
70
+ nodeGroupList,
71
+ countList: nodeContentGroupList.map((e) => e.nodeCount).sort((a, b) => a - b),
72
+ depthList: nodeContentGroupList.map((e) => e.depth).sort((a, b) => a - b)
73
+ };
74
+ }
75
+ static getFileContentByLine(map, filePath, start, end) {
76
+ const fixedStart = Math.max(start - 1, 0);
77
+ const content = map.get(filePath).lines.slice(fixedStart, end).join("\n");
78
+ const location = `${filePath}: from line ${start} to line ${end}`;
79
+ return { location, content };
80
+ }
81
+ static getListOfGroup(mapFilePathToDetail, validFullNodeList) {
82
+ const listOfGroup = [];
83
+ const locationStrSet = /* @__PURE__ */ new Set();
84
+ for (const item of validFullNodeList) {
85
+ const { listOfNodeInfo } = item;
86
+ const newList = listOfNodeInfo.map(({ rootNode, edgeNodeCollection }) => {
87
+ var _a;
88
+ const { filePath, startLine, endLine } = rootNode._util;
89
+ const linesNums = (0, import_lodash_es.range)(startLine, endLine + 1);
90
+ const edgeNodesLines = edgeNodeCollection.map((edgeNode) => {
91
+ const { startLine: itemStartLine, endLine: itemEndLine } = edgeNode._util;
92
+ if (itemStartLine === startLine || itemEndLine === endLine) {
93
+ return [];
94
+ }
95
+ return itemStartLine + 1 >= itemEndLine ? [] : (0, import_lodash_es.range)(itemStartLine + 1, itemEndLine);
96
+ }).flat();
97
+ const listOfRatedContent = [
98
+ {
99
+ linesNums: [],
100
+ lines: [],
101
+ rate: 0
102
+ }
103
+ ];
104
+ const lines = ((_a = mapFilePathToDetail.get(filePath)) == null ? void 0 : _a.lines) || [];
105
+ if (edgeNodesLines.length > 0) {
106
+ for (let i = startLine; i <= endLine; i++) {
107
+ const flag = edgeNodesLines.includes(i);
108
+ const rate = flag ? 1 : 0;
109
+ let lastRatedContent = listOfRatedContent[listOfRatedContent.length - 1];
110
+ if (lastRatedContent.rate !== rate) {
111
+ listOfRatedContent.push(lastRatedContent = {
112
+ linesNums: [],
113
+ lines: [],
114
+ rate
115
+ });
116
+ }
117
+ lastRatedContent.lines.push(lines[i - 1]);
118
+ lastRatedContent.linesNums.push(i);
119
+ }
120
+ } else {
121
+ listOfRatedContent.push({ linesNums, lines: lines.slice(Math.max(startLine - 1, 0), endLine), rate: 0 });
122
+ }
123
+ const { location, content } = this.getFileContentByLine(mapFilePathToDetail, filePath, startLine, endLine);
124
+ return { location, content, astNode: rootNode, listOfRatedContent };
125
+ });
126
+ const locationStr = newList.map((e) => e.location).join();
127
+ if (!locationStrSet.has(locationStr)) {
128
+ locationStrSet.add(locationStr);
129
+ const { nodeCount, depth, hashKey, listOfNodeInfo: listOfNodeInfo2 } = item;
130
+ listOfGroup.push({
131
+ nodeCount,
132
+ depth,
133
+ hashKey,
134
+ listOfNodeInfo: listOfNodeInfo2,
135
+ list: newList
136
+ });
137
+ }
138
+ }
139
+ return listOfGroup;
140
+ }
141
+ static createMapHashKeyToAstNodeSet(fileList) {
142
+ const mapHashKeyToTopLevelNode = /* @__PURE__ */ new Map();
143
+ const nodeTypeSet = /* @__PURE__ */ new Set();
144
+ const errorList = [];
145
+ fileList.forEach((file) => {
146
+ const { filePath, fileContent } = file;
147
+ const [errorMsg, parsedNode] = import_FileUtil.default.parseFile(filePath, fileContent);
148
+ if (parsedNode) {
149
+ AstUtil.deepFirstTravel(parsedNode, {
150
+ mapHashKeyToTopLevelNode,
151
+ nodeTypeSet
152
+ }, filePath);
153
+ } else if (errorMsg) {
154
+ errorList.push(errorMsg);
155
+ }
156
+ });
157
+ import_AstFeatUtil.default.deleteSameSubSetPartial(mapHashKeyToTopLevelNode);
158
+ return [mapHashKeyToTopLevelNode, nodeTypeSet, errorList];
159
+ }
160
+ static getListOfNodeGroup(mapHashKeyToNodeSet) {
161
+ const nodeGroupList = [];
162
+ const countPartialSet = /* @__PURE__ */ new Set();
163
+ const depthSet = /* @__PURE__ */ new Set();
164
+ for (const [hashKey, nodeSet] of mapHashKeyToNodeSet) {
165
+ if (nodeSet.size < 2) {
166
+ continue;
167
+ }
168
+ const baseNode = [...nodeSet][0];
169
+ let nodeCount = 0;
170
+ const index = hashKey.indexOf(":");
171
+ const depth = Number(hashKey.slice(0, index));
172
+ depthSet.add(depth);
173
+ const baseDepth = baseNode._util.depth;
174
+ nodeCount = baseNode._util.nodeCollection.filter((n) => n._util.depth - baseDepth <= depth).length;
175
+ countPartialSet.add(nodeCount);
176
+ const listOfNodeInfo = import_AstFeatUtil.default.spreadSubNode(nodeSet, depth);
177
+ nodeGroupList.push({
178
+ list: [...nodeSet],
179
+ listOfNodeInfo,
180
+ nodeCount,
181
+ depth,
182
+ hashKey
183
+ });
184
+ }
185
+ const countList = [...countPartialSet].sort((a, b) => a - b);
186
+ const depthList = [...depthSet].sort((a, b) => a - b);
187
+ return {
188
+ nodeGroupList,
189
+ countList,
190
+ depthList
191
+ };
192
+ }
193
+ };
@@ -1,6 +1,6 @@
1
1
  import * as babelParse from "@babel/parser";
2
2
  import * as vueParse from "vue-eslint-parser";
3
- import { AstNode } from "./AstUtil";
3
+ import { AstFeatNode } from "./AstFeatUtil";
4
4
  export declare const extensionsOfJs: string[];
5
5
  export declare const extensions: string[];
6
6
  export type MapFilePathToDetail = Map<string, FileSegment & {
@@ -14,6 +14,6 @@ export default class FileUtil {
14
14
  static parseVue(filePath: string, fileContent: string): vueParse.AST.ESLintProgram;
15
15
  static parseJsxLike(filePath: string, fileContent: string): babelParse.ParseResult<import("@babel/types").File>;
16
16
  static createMapFilePathToDetail(list: FileSegment[]): MapFilePathToDetail;
17
- static parseFile(filePath: string, fileContent: string): [string, Omit<AstNode, '_util'> | null];
18
- static getASTByFilePath(filePath: string): Omit<AstNode, "_util"> | null;
17
+ static parseFile(filePath: string, fileContent: string): [string, Omit<AstFeatNode, '_util'> | null];
18
+ static getASTByFilePath(filePath: string): Omit<AstFeatNode, "_util"> | null;
19
19
  }
@@ -31,7 +31,7 @@ export declare function createDetectReport(arg: Arg): {
31
31
  kind: "Import" | "Declaration" | "Assignment" | "SelfUpdate" | "Invoke" | "Other" | "Never";
32
32
  }[];
33
33
  filePath: string;
34
- type: "modify" | "add" | "delete";
34
+ type: "add" | "delete" | "modify";
35
35
  filesDependsOnMe: string[];
36
36
  dangerIdentifiers: string[];
37
37
  }[];
@@ -0,0 +1,2 @@
1
+ import { AstFeatNode } from "../ast_util/AstFeatUtil";
2
+ export declare const astNodeFeatureExtractorMap: Map<string, (a: AstFeatNode) => string>;
@@ -0,0 +1,59 @@
1
+ var __defProp = Object.defineProperty;
2
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
3
+ var __getOwnPropNames = Object.getOwnPropertyNames;
4
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
5
+ var __export = (target, all) => {
6
+ for (var name in all)
7
+ __defProp(target, name, { get: all[name], enumerable: true });
8
+ };
9
+ var __copyProps = (to, from, except, desc) => {
10
+ if (from && typeof from === "object" || typeof from === "function") {
11
+ for (let key of __getOwnPropNames(from))
12
+ if (!__hasOwnProp.call(to, key) && key !== except)
13
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
14
+ }
15
+ return to;
16
+ };
17
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
18
+
19
+ // src/util/shared/astNodeFeatureExtractorMap.ts
20
+ var astNodeFeatureExtractorMap_exports = {};
21
+ __export(astNodeFeatureExtractorMap_exports, {
22
+ astNodeFeatureExtractorMap: () => astNodeFeatureExtractorMap
23
+ });
24
+ module.exports = __toCommonJS(astNodeFeatureExtractorMap_exports);
25
+ var import_featureLevel = require("./featureLevel");
26
+ var astNodeFeatureExtractorMap = /* @__PURE__ */ new Map([
27
+ [
28
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.LOOSE,
29
+ (a) => a.type
30
+ ],
31
+ [
32
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.NORMAL,
33
+ (n) => {
34
+ if (typeof n.name === "object") {
35
+ return n.type + ":" + n.name.name;
36
+ } else if (typeof n.name === "string") {
37
+ return n.type + ":" + n.name;
38
+ }
39
+ return n.type;
40
+ }
41
+ ],
42
+ [
43
+ import_featureLevel.AST_NODE_FEATURE_LEVEL.EXACT,
44
+ (n) => {
45
+ if (typeof n.name === "object") {
46
+ return n.type + ":" + n.name.name;
47
+ } else if (typeof n.name === "string") {
48
+ return n.type + ":" + n.name;
49
+ } else if (n.extra && "raw" in n.extra) {
50
+ return n.type + ":" + n.extra.raw;
51
+ }
52
+ return n.type;
53
+ }
54
+ ]
55
+ ]);
56
+ // Annotate the CommonJS export names for ESM import in node:
57
+ 0 && (module.exports = {
58
+ astNodeFeatureExtractorMap
59
+ });
@@ -0,0 +1,5 @@
1
+ export declare const AST_NODE_FEATURE_LEVEL: {
2
+ LOOSE: string;
3
+ NORMAL: string;
4
+ EXACT: string;
5
+ };
@@ -0,0 +1,33 @@
1
// Generated CJS bundle for src/util/shared/featureLevel.ts (esbuild output).
// esbuild CommonJS interop helpers (generated preamble).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` as an enumerable getter on `target`
// (getters give ESM-style live bindings on the CJS exports object).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves each key's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a module namespace in a CJS exports object tagged __esModule.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/shared/featureLevel.ts
var featureLevel_exports = {};
__export(featureLevel_exports, {
  AST_NODE_FEATURE_LEVEL: () => AST_NODE_FEATURE_LEVEL
});
module.exports = __toCommonJS(featureLevel_exports);
// Feature-extraction granularity levels; each value equals its key, so the
// values double as readable map keys (see astNodeFeatureExtractorMap).
var AST_NODE_FEATURE_LEVEL = {
  LOOSE: "LOOSE",
  NORMAL: "NORMAL",
  EXACT: "EXACT"
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  AST_NODE_FEATURE_LEVEL
});
@@ -0,0 +1,4 @@
1
/**
 * Recursively reads the source files under `<cwd>/<dirOfCwd>` and resolves
 * with each file's path and UTF-8 content.
 *
 * @param dirOfCwd directory relative to the current working directory;
 *   presumably defaults to "src" as in the dist implementation — confirm.
 */
export declare function readSrcFiles(dirOfCwd?: string): Promise<{
    filePath: string;
    fileContent: string;
}[]>;
@@ -0,0 +1,45 @@
1
// esbuild CommonJS interop helpers (generated preamble).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` as an enumerable getter on `target`
// (getters give ESM-style live bindings on the CJS exports object).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves each key's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a module namespace in a CJS exports object tagged __esModule.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/util/shared/readDirFiles.ts
var readDirFiles_exports = {};
// Expose readSrcFiles as a live getter and build the tagged exports object.
__export(readDirFiles_exports, {
  readSrcFiles: () => readSrcFiles
});
module.exports = __toCommonJS(readDirFiles_exports);
25
+ var import_fs = require("fs");
26
+ var import_utils = require("@umijs/utils");
27
+ var import_path = require("path");
28
// Paths to skip when scanning: installed deps, type declarations, umi output.
var exclude = [/node_modules/, /\.d\.ts$/, /\.umi/];
/**
 * Reads every source file under `<cwd>/<dirOfCwd>` (default "src").
 * Skips anything matching `exclude` above.
 *
 * @param {string} [dirOfCwd] directory relative to process.cwd(); falls back
 *   to "src" when omitted or empty.
 * @returns {Promise<{filePath: string, fileContent: string}[]>} resolved
 *   with each file's path and UTF-8 content.
 */
function readSrcFiles(dirOfCwd) {
  const dir = (0, import_path.join)(process.cwd(), dirOfCwd || "src");
  const fileItems = (0, import_utils.readDirFiles)({
    dir,
    exclude
  });
  // The mapped items are plain objects, not thenables, so the original
  // Promise.all was redundant and misleading; Promise.resolve yields the
  // identical resolved value while keeping the Promise-returning signature.
  return Promise.resolve(fileItems.map((item) => ({
    filePath: item.filePath,
    // NOTE(review): readFileSync may throw on unreadable files; the original
    // behaved the same — surfaced here rather than swallowed.
    fileContent: (0, import_fs.readFileSync)(item.filePath, "utf-8")
  })));
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  readSrcFiles
});
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "js-code-detector",
3
- "version": "0.0.4",
3
+ "version": "0.0.5",
4
4
  "description": "",
5
5
  "main": "dist/cjs/index.js",
6
6
  "types": "dist/cjs/index.d.ts",
@@ -21,19 +21,24 @@
21
21
  "bin"
22
22
  ],
23
23
  "bin": {
24
- "detect": "bin/detect.js"
24
+ "detect": "bin/detect.js",
25
+ "sameCodeDetect": "bin/sameCodeDetect.js"
25
26
  },
26
27
  "publishConfig": {
27
28
  "access": "public"
28
29
  },
29
30
  "devDependencies": {
31
+ "@types/crypto-js": "^4.2.2",
32
+ "@types/lodash-es": "^4.17.12",
30
33
  "@types/madge": "^5.0.3",
31
34
  "father": "^4.6.3"
32
35
  },
33
36
  "dependencies": {
34
37
  "@babel/parser": "^7.28.3",
35
38
  "@umijs/utils": "^4.4.12",
39
+ "crypto-js": "^4.2.0",
36
40
  "dayjs": "^1.11.13",
41
+ "lodash-es": "^4.17.21",
37
42
  "madge": "^8.0.0",
38
43
  "vue-eslint-parser": "^10.2.0"
39
44
  }
File without changes
File without changes