@aiready/pattern-detect 0.1.3 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +149 -38
- package/dist/{chunk-N5DE7IYX.mjs → chunk-AXHGYYYZ.mjs} +5 -17
- package/dist/{chunk-57O7FEEM.mjs → chunk-JKVKOXYR.mjs} +43 -36
- package/dist/{chunk-DNI7S33V.mjs → chunk-OFGMDX66.mjs} +34 -31
- package/dist/{chunk-4CZGZIDL.mjs → chunk-QE4E3F7C.mjs} +20 -19
- package/dist/{chunk-ZNZ5O435.mjs → chunk-TXWPOVYU.mjs} +37 -35
- package/dist/cli.js +45 -55
- package/dist/cli.mjs +17 -18
- package/dist/index.d.mts +5 -10
- package/dist/index.d.ts +5 -10
- package/dist/index.js +29 -38
- package/dist/index.mjs +1 -1
- package/package.json +11 -11
- package/dist/chunk-6VQTQRDW.mjs +0 -245
- package/dist/chunk-JTJXOIO2.mjs +0 -378
- package/dist/chunk-K5O2HVB5.mjs +0 -114
- package/dist/chunk-RLWJXASG.mjs +0 -227
- package/dist/chunk-YA3N6EC5.mjs +0 -351
package/dist/chunk-JTJXOIO2.mjs
DELETED
|
@@ -1,378 +0,0 @@
|
|
|
1
|
-
// src/index.ts
|
|
2
|
-
import { scanFiles, readFileContent } from "@aiready/core";
|
|
3
|
-
|
|
4
|
-
// src/detector.ts
|
|
5
|
-
import { similarityScore, estimateTokens } from "@aiready/core";
|
|
6
|
-
/**
 * Heuristically classify a code snippet into a pattern category based on
 * keyword occurrences. Rules are evaluated in order from most to least
 * specific, and the first matching category wins.
 *
 * @param {string} code - Raw source text of a code block.
 * @returns {string} One of "api-handler", "validator", "component",
 *   "class-method", "utility", "function", or "unknown".
 */
function categorizePattern(code) {
  const text = code.toLowerCase();
  const has = (needle) => text.includes(needle);

  // Ordered [category, predicate] pairs. Order matters: several keyword
  // sets overlap (e.g. "function" appears in most blocks), so broader
  // categories must come last.
  const rules = [
    [
      "api-handler",
      () =>
        (has("request") && has("response")) ||
        has("router.") ||
        has("app.get") ||
        has("app.post") ||
        has("express") ||
        has("ctx.body"),
    ],
    [
      "validator",
      () =>
        has("validate") ||
        has("schema") ||
        has("zod") ||
        has("yup") ||
        (has("if") && has("throw")),
    ],
    [
      "component",
      () => has("return (") || has("jsx") || has("component") || has("props"),
    ],
    ["class-method", () => has("class ") || has("this.")],
    ["utility", () => has("return ") && !has("this") && !has("new ")],
    ["function", () => has("function") || has("=>")],
  ];

  for (const [category, matches] of rules) {
    if (matches()) {
      return category;
    }
  }
  return "unknown";
}
|
|
28
|
-
// Scan file content line-by-line and pull out function-like blocks that are
// at least `minLines` raw lines long. Detection is purely textual: a simple
// brace-depth counter, not a parser, so braces inside strings or comments
// will skew the balance, and `braceDepth` is never reset between candidate
// blocks (it tracks the net balance of the whole file).
function extractCodeBlocks(content, minLines) {
  const lines = content.split("\n");
  const blocks = [];
  let currentBlock = [];
  let blockStart = 0;
  let braceDepth = 0; // running net count of `{` minus `}` seen so far
  let inFunction = false;
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const trimmed = line.trim();
    // Function-start heuristics: `function` keyword, arrow functions,
    // `async` markers, and `const name = (...)` expressions (exported or
    // not). Only triggers when not already inside a candidate block.
    if (!inFunction && (trimmed.includes("function ") || trimmed.includes("=>") || trimmed.includes("async ") || /^(export\s+)?(async\s+)?function\s+/.test(trimmed) || /^(export\s+)?const\s+\w+\s*=\s*(async\s*)?\(/.test(trimmed))) {
      inFunction = true;
      blockStart = i;
    }
    for (const char of line) {
      if (char === "{") braceDepth++;
      if (char === "}") braceDepth--;
    }
    if (inFunction) {
      currentBlock.push(line);
    }
    // Depth returning to zero means the candidate block closed on this line.
    // Note: a header line with no `{` on it (e.g. an arrow whose body opens
    // on the next line) also hits depth 0 and is discarded below.
    if (inFunction && braceDepth === 0 && currentBlock.length >= minLines) {
      const blockContent = currentBlock.join("\n");
      // Count only non-blank lines that are not `//` line comments.
      const linesOfCode = currentBlock.filter(
        (l) => l.trim() && !l.trim().startsWith("//")
      ).length;
      blocks.push({
        content: blockContent,
        startLine: blockStart + 1, // convert 0-based index to 1-based line
        patternType: categorizePattern(blockContent),
        linesOfCode
      });
      currentBlock = [];
      inFunction = false;
    } else if (inFunction && braceDepth === 0) {
      // Block closed but is shorter than `minLines` — drop it.
      currentBlock = [];
      inFunction = false;
    }
  }
  return blocks;
}
|
|
69
|
-
/**
 * Normalize code for similarity comparison: strip comments, replace string
 * and numeric literals with fixed placeholders, and collapse all whitespace
 * runs to single spaces.
 *
 * @param {string} code - Raw source text.
 * @returns {string} Trimmed, canonicalized single-line representation.
 */
function normalizeCode(code) {
  // Ordered substitution table; applied top to bottom.
  const substitutions = [
    [/\/\/.*$/gm, ""],          // line comments
    [/\/\*[\s\S]*?\*\//g, ""],  // block comments (non-greedy)
    [/"[^"]*"/g, '"STR"'],      // double-quoted strings
    [/'[^']*'/g, "'STR'"],      // single-quoted strings
    [/`[^`]*`/g, "`STR`"],      // template literals (no nesting handling)
    [/\b\d+\b/g, "NUM"],        // integer literals
    [/\s+/g, " "],              // collapse whitespace
  ];
  let normalized = code;
  for (const [pattern, replacement] of substitutions) {
    normalized = normalized.replace(pattern, replacement);
  }
  return normalized.trim();
}
|
|
72
|
-
/**
 * Jaccard similarity of two token lists: |A ∩ B| / |A ∪ B|.
 * Duplicate tokens are ignored (set semantics). Returns 0 when both
 * inputs are empty instead of dividing by zero.
 *
 * @param {string[]} tokens1 - First token list.
 * @param {string[]} tokens2 - Second token list.
 * @returns {number} Similarity in the range [0, 1].
 */
function jaccardSimilarity(tokens1, tokens2) {
  const left = new Set(tokens1);
  const right = new Set(tokens2);
  const shared = [...left].filter((token) => right.has(token)).length;
  const unionSize = left.size + right.size - shared;
  if (unionSize === 0) {
    return 0;
  }
  return shared / unionSize;
}
|
|
82
|
-
/**
 * Blend character-level and token-level similarity between two raw code
 * blocks. Both blocks are normalized first; the token-level score is
 * weighted more heavily (0.6 vs 0.4).
 *
 * @param {string} block1 - Raw source of the first block.
 * @param {string} block2 - Raw source of the second block.
 * @returns {number} Weighted similarity score.
 */
function calculateSimilarity(block1, block2) {
  const normalizedA = normalizeCode(block1);
  const normalizedB = normalizeCode(block2);
  const charScore = similarityScore(normalizedA, normalizedB);

  // Token-level pass: split on whitespace and common punctuation, then
  // re-join so similarityScore compares token streams rather than raw text.
  const splitter = /[\s(){}[\];,]+/;
  const tokensA = normalizedA.split(splitter).filter(Boolean);
  const tokensB = normalizedB.split(splitter).filter(Boolean);
  const tokenScore = similarityScore(tokensA.join(" "), tokensB.join(" "));

  return charScore * 0.4 + tokenScore * 0.6;
}
|
|
91
|
-
// Find near-duplicate code blocks across a set of files.
//
// Pipeline: extract candidate blocks per file -> (optionally) cap the block
// count -> build an inverted token index for approximate candidate selection
// -> compare candidate pairs (fast Jaccard or slower blended similarity) ->
// return matches sorted by similarity, then token cost.
//
// Progress and warnings are written to console.log; the loop yields to the
// event loop via setImmediate between batches so the process stays responsive.
async function detectDuplicatePatterns(files, options) {
  const {
    minSimilarity,
    minLines,
    maxBlocks = 500,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    fastMode = true,
    maxComparisons = 5e4
    // Cap at 50K comparisons by default
  } = options;
  const duplicates = [];
  // Flatten every file into annotated blocks (origin file, normalized text,
  // estimated token cost).
  let allBlocks = files.flatMap(
    (file) => extractCodeBlocks(file.content, minLines).map((block) => ({
      ...block,
      file: file.file,
      normalized: normalizeCode(block.content),
      tokenCost: estimateTokens(block.content)
    }))
  );
  console.log(`Extracted ${allBlocks.length} code blocks for analysis`);
  // Keep only the largest blocks when over the cap — big blocks carry the
  // most duplication cost.
  if (allBlocks.length > maxBlocks) {
    console.log(`\u26A0\uFE0F Limiting to ${maxBlocks} blocks (sorted by size) to prevent memory issues`);
    console.log(` Use --max-blocks to increase limit or --min-lines to filter smaller blocks`);
    allBlocks = allBlocks.sort((a, b) => b.linesOfCode - a.linesOfCode).slice(0, maxBlocks);
  }
  // Keywords too common to be useful as candidate-selection signals.
  const stopwords = /* @__PURE__ */ new Set([
    "return",
    "const",
    "let",
    "var",
    "function",
    "class",
    "new",
    "if",
    "else",
    "for",
    "while",
    "async",
    "await",
    "try",
    "catch",
    "switch",
    "case",
    "default",
    "import",
    "export",
    "from",
    "true",
    "false",
    "null",
    "undefined",
    "this"
  ]);
  // Tokens must be >= 3 chars and not stopwords.
  const tokenize = (norm) => norm.split(/[\s(){}\[\];,\.]+/).filter((t) => t && t.length >= 3 && !stopwords.has(t.toLowerCase()));
  const blockTokens = allBlocks.map((b) => tokenize(b.normalized));
  // token -> list of block indices containing it (approx mode only).
  const invertedIndex = /* @__PURE__ */ new Map();
  if (approx) {
    for (let i = 0; i < blockTokens.length; i++) {
      for (const tok of blockTokens[i]) {
        let arr = invertedIndex.get(tok);
        if (!arr) {
          arr = [];
          invertedIndex.set(tok, arr);
        }
        arr.push(i);
      }
    }
  }
  // Exact pair count is only known for the exhaustive (non-approx) path.
  const totalComparisons = approx ? void 0 : allBlocks.length * (allBlocks.length - 1) / 2;
  if (totalComparisons !== void 0) {
    console.log(`Processing ${totalComparisons.toLocaleString()} comparisons in batches...`);
  } else {
    console.log(`Using approximate candidate selection to reduce comparisons...`);
  }
  let comparisonsProcessed = 0;
  let comparisonsBudgetExhausted = false;
  const startTime = Date.now();
  for (let i = 0; i < allBlocks.length; i++) {
    // Hard budget check at the top of each outer iteration.
    if (maxComparisons && comparisonsProcessed >= maxComparisons) {
      comparisonsBudgetExhausted = true;
      break;
    }
    // Periodic progress report plus an event-loop yield.
    if (i % batchSize === 0 && i > 0) {
      const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
      if (totalComparisons !== void 0) {
        const progress = (comparisonsProcessed / totalComparisons * 100).toFixed(1);
        console.log(` ${progress}% complete (${comparisonsProcessed.toLocaleString()}/${totalComparisons.toLocaleString()} comparisons, ${elapsed}s elapsed)`);
      } else {
        console.log(` Processed ${i.toLocaleString()} blocks (${elapsed}s elapsed)`);
      }
      await new Promise((resolve) => setImmediate(resolve));
    }
    const block1 = allBlocks[i];
    let candidates = null;
    if (approx) {
      // Count shared (non-stopword) tokens with every later block in a
      // different file, then keep the best-overlapping candidates.
      const counts = /* @__PURE__ */ new Map();
      for (const tok of blockTokens[i]) {
        const ids = invertedIndex.get(tok);
        if (!ids) continue;
        for (const j of ids) {
          if (j <= i) continue; // each unordered pair compared once
          if (allBlocks[j].file === block1.file) continue; // skip same-file pairs
          counts.set(j, (counts.get(j) || 0) + 1);
        }
      }
      candidates = Array.from(counts.entries()).filter(([, shared]) => shared >= minSharedTokens).sort((a, b) => b[1] - a[1]).slice(0, maxCandidatesPerBlock).map(([j, shared]) => ({ j, shared }));
    }
    if (approx && candidates) {
      for (const { j } of candidates) {
        if (maxComparisons && comparisonsProcessed >= maxComparisons) break;
        comparisonsProcessed++;
        const block2 = allBlocks[j];
        // fastMode: cheap token-set Jaccard; otherwise the blended
        // character/token similarity.
        const similarity = fastMode ? jaccardSimilarity(blockTokens[i], blockTokens[j]) : calculateSimilarity(block1.content, block2.content);
        if (similarity >= minSimilarity) {
          duplicates.push({
            file1: block1.file,
            file2: block2.file,
            line1: block1.startLine,
            line2: block2.startLine,
            similarity,
            snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
            patternType: block1.patternType,
            tokenCost: block1.tokenCost + block2.tokenCost,
            linesOfCode: block1.linesOfCode
          });
        }
      }
    } else {
      // Exhaustive O(n^2) fallback when approx mode is disabled.
      for (let j = i + 1; j < allBlocks.length; j++) {
        if (maxComparisons && comparisonsProcessed >= maxComparisons) break;
        comparisonsProcessed++;
        const block2 = allBlocks[j];
        if (block1.file === block2.file) continue;
        const similarity = fastMode ? jaccardSimilarity(blockTokens[i], blockTokens[j]) : calculateSimilarity(block1.content, block2.content);
        if (similarity >= minSimilarity) {
          duplicates.push({
            file1: block1.file,
            file2: block2.file,
            line1: block1.startLine,
            line2: block2.startLine,
            similarity,
            snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
            patternType: block1.patternType,
            tokenCost: block1.tokenCost + block2.tokenCost,
            linesOfCode: block1.linesOfCode
          });
        }
      }
    }
  }
  if (comparisonsBudgetExhausted) {
    console.log(`\u26A0\uFE0F Comparison budget exhausted (${maxComparisons.toLocaleString()} comparisons). Use --max-comparisons to increase.`);
  }
  // Most similar first; ties broken by the larger wasted-token cost.
  return duplicates.sort(
    (a, b) => b.similarity - a.similarity || b.tokenCost - a.tokenCost
  );
}
|
|
251
|
-
|
|
252
|
-
// src/index.ts
|
|
253
|
-
/**
 * Build a human-readable refactoring suggestion for a duplicate pattern.
 *
 * @param {string} patternType - Category from categorizePattern()
 *   ("api-handler", "validator", "utility", "class-method", "component",
 *   "function", "unknown").
 * @param {number} similarity - Similarity score in [0, 1]; values above
 *   0.95 / 0.9 append a CRITICAL / HIGH urgency suffix.
 * @returns {string} Suggestion text, possibly with an urgency suffix.
 */
function getRefactoringSuggestion(patternType, similarity) {
  const baseMessages = {
    "api-handler": "Extract common middleware or create a base handler class",
    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
    utility: "Move to a shared utilities file and reuse across modules",
    "class-method": "Consider inheritance or composition to share behavior",
    component: "Extract shared logic into a custom hook or HOC",
    function: "Extract into a shared helper function",
    unknown: "Extract common logic into a reusable module"
  };
  const urgency = similarity > 0.95 ? " (CRITICAL: Nearly identical code)" : similarity > 0.9 ? " (HIGH: Very similar, refactor soon)" : "";
  // Fall back to the generic message so an unrecognized pattern type can
  // never produce "undefined ..." in user-facing output.
  return (baseMessages[patternType] ?? baseMessages.unknown) + urgency;
}
|
|
266
|
-
// Top-level analysis entry point: scan files, read their contents, detect
// cross-file duplicate patterns, and convert each duplicate into a per-file
// issue record with a severity, message, and refactoring suggestion.
// Unrecognized options fall through to scanFiles() via `scanOptions`.
async function analyzePatterns(options) {
  const {
    minSimilarity = 0.85,
    minLines = 5,
    maxBlocks = 500,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    fastMode = true,
    maxComparisons = 5e4,
    ...scanOptions
  } = options;
  const files = await scanFiles(scanOptions);
  const results = [];
  // Read all file contents in parallel.
  const fileContents = await Promise.all(
    files.map(async (file) => ({
      file,
      content: await readFileContent(file)
    }))
  );
  const duplicates = await detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines,
    maxBlocks,
    batchSize,
    approx,
    minSharedTokens,
    maxCandidatesPerBlock,
    fastMode,
    maxComparisons
  });
  for (const file of files) {
    // A duplicate counts against both files of the pair.
    const fileDuplicates = duplicates.filter(
      (dup) => dup.file1 === file || dup.file2 === file
    );
    const issues = fileDuplicates.map((dup) => {
      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
      // Severity thresholds: >0.95 critical, >0.9 major, else minor.
      const severity = dup.similarity > 0.95 ? "critical" : dup.similarity > 0.9 ? "major" : "minor";
      return {
        type: "duplicate-pattern",
        severity,
        // NOTE: generateSummary() parses this exact message format back out
        // with regexes — keep the two in sync.
        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
        location: {
          file,
          line: dup.file1 === file ? dup.line1 : dup.line2
        },
        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
      };
    });
    const totalTokenCost = fileDuplicates.reduce(
      (sum, dup) => sum + dup.tokenCost,
      0
    );
    results.push({
      fileName: file,
      issues,
      metrics: {
        tokenCost: totalTokenCost,
        // Each duplicate docks 10% consistency, floored at 0.
        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
      }
    });
  }
  return results;
}
|
|
331
|
-
/**
 * Aggregate per-file analysis results into a repository-level summary.
 * Pattern type, similarity, token cost, and the partner file are parsed
 * back out of each issue's message text (the format produced by
 * analyzePatterns), so this function must stay in sync with that format.
 *
 * @param {Array<{issues: Array, metrics: Object}>} results - Per-file results.
 * @returns {{totalPatterns: number, totalTokenCost: number,
 *   patternsByType: Object<string, number>, topDuplicates: Array}}
 */
function generateSummary(results) {
  const allIssues = [];
  let totalTokenCost = 0;
  for (const result of results) {
    allIssues.push(...result.issues);
    totalTokenCost += result.metrics.tokenCost || 0;
  }

  // Start every known category at zero so absent types still appear.
  const patternsByType = {
    "api-handler": 0,
    validator: 0,
    utility: 0,
    "class-method": 0,
    component: 0,
    function: 0,
    unknown: 0
  };
  const TYPE_RE = /^(\S+(?:-\S+)*) pattern/;
  for (const issue of allIssues) {
    const typeMatch = issue.message.match(TYPE_RE);
    if (typeMatch) {
      const type = typeMatch[1];
      patternsByType[type] = (patternsByType[type] || 0) + 1;
    }
  }

  // Results arrive pre-sorted by similarity, so the first 10 are the top 10.
  const topDuplicates = allIssues.slice(0, 10).map((issue) => {
    const similarity = issue.message.match(/(\d+)% similar/);
    const tokens = issue.message.match(/\((\d+) tokens/);
    const type = issue.message.match(TYPE_RE);
    const partner = issue.message.match(/similar to (.+?) \(/);
    return {
      file1: issue.location.file,
      file2: partner?.[1] || "unknown",
      similarity: similarity ? parseInt(similarity[1], 10) / 100 : 0,
      patternType: type?.[1] || "unknown",
      tokenCost: tokens ? parseInt(tokens[1], 10) : 0
    };
  });

  return {
    totalPatterns: allIssues.length,
    totalTokenCost,
    patternsByType,
    topDuplicates
  };
}
|
|
373
|
-
|
|
374
|
-
export {
|
|
375
|
-
detectDuplicatePatterns,
|
|
376
|
-
analyzePatterns,
|
|
377
|
-
generateSummary
|
|
378
|
-
};
|
package/dist/chunk-K5O2HVB5.mjs
DELETED
|
@@ -1,114 +0,0 @@
|
|
|
1
|
-
// src/index.ts
|
|
2
|
-
import { scanFiles, readFileContent } from "@aiready/core";
|
|
3
|
-
|
|
4
|
-
// src/detector.ts
|
|
5
|
-
import { similarityScore } from "@aiready/core";
|
|
6
|
-
/**
 * Split file content into brace-balanced chunks of at least `minLines`
 * raw lines. Every line is accumulated; whenever the running `{`/`}`
 * balance returns to zero the accumulated chunk is either emitted (if
 * long enough) or discarded. Brace counting is purely textual, so braces
 * inside strings or comments will skew the balance.
 *
 * @param {string} content - Full text of a source file.
 * @param {number} minLines - Minimum chunk length (in raw lines) to keep.
 * @returns {{content: string, startLine: number}[]} Chunks with 1-based start lines.
 */
function extractCodeBlocks(content, minLines) {
  const allLines = content.split("\n");
  const blocks = [];
  let pending = [];
  let startIndex = 0;
  let depth = 0;

  allLines.forEach((line, index) => {
    for (const ch of line) {
      if (ch === "{") {
        depth += 1;
      } else if (ch === "}") {
        depth -= 1;
      }
    }
    pending.push(line);

    if (depth !== 0) {
      return; // still inside an open brace pair — keep accumulating
    }
    if (pending.length >= minLines) {
      blocks.push({
        content: pending.join("\n"),
        startLine: startIndex + 1, // convert 0-based index to 1-based line
      });
    }
    // Balanced point reached: start a fresh chunk on the next line,
    // whether or not the previous one was kept.
    pending = [];
    startIndex = index + 1;
  });

  return blocks;
}
|
|
33
|
-
/**
 * Normalize code for comparison: drop line and block comments and collapse
 * every whitespace run to a single space.
 *
 * @param {string} code - Raw source text.
 * @returns {string} Trimmed, single-line representation.
 */
function normalizeCode(code) {
  const withoutLineComments = code.replace(/\/\/.*$/gm, "");
  const withoutBlockComments = withoutLineComments.replace(/\/\*[\s\S]*?\*\//g, "");
  const collapsed = withoutBlockComments.replace(/\s+/g, " ");
  return collapsed.trim();
}
|
|
36
|
-
// Exhaustive O(n^2) duplicate detection (older implementation): extract
// brace-balanced blocks from every file, normalize them, then compare every
// cross-file pair with similarityScore. Same-file pairs are skipped.
function detectDuplicatePatterns(files, options) {
  const { minSimilarity, minLines } = options;
  const duplicates = [];
  // Flatten every file into blocks annotated with origin file and
  // comment-stripped, whitespace-collapsed text.
  const allBlocks = files.flatMap(
    (file) => extractCodeBlocks(file.content, minLines).map((block) => ({
      ...block,
      file: file.file,
      normalized: normalizeCode(block.content)
    }))
  );
  // Compare each unordered pair exactly once (j > i).
  for (let i = 0; i < allBlocks.length; i++) {
    for (let j = i + 1; j < allBlocks.length; j++) {
      const block1 = allBlocks[i];
      const block2 = allBlocks[j];
      if (block1.file === block2.file) continue; // only cross-file duplicates
      const similarity = similarityScore(block1.normalized, block2.normalized);
      if (similarity >= minSimilarity) {
        duplicates.push({
          file1: block1.file,
          file2: block2.file,
          line1: block1.startLine,
          line2: block2.startLine,
          similarity,
          // First three lines of the first block as a preview.
          snippet: block1.content.split("\n").slice(0, 3).join("\n") + "..."
        });
      }
    }
  }
  // Most similar pairs first.
  return duplicates.sort((a, b) => b.similarity - a.similarity);
}
|
|
66
|
-
|
|
67
|
-
// src/index.ts
|
|
68
|
-
// Top-level analysis entry point (older implementation): scan files, read
// contents in parallel, detect cross-file duplicates, and emit one result
// per file with its issues and a simple consistency metric. Unrecognized
// options fall through to scanFiles() via `scanOptions`.
async function analyzePatterns(options) {
  const {
    minSimilarity = 0.85,
    minLines = 5,
    ...scanOptions
  } = options;
  const files = await scanFiles(scanOptions);
  const results = [];
  // Read all file contents in parallel.
  const fileContents = await Promise.all(
    files.map(async (file) => ({
      file,
      content: await readFileContent(file)
    }))
  );
  const duplicates = detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines
  });
  for (const file of files) {
    // A duplicate counts against both files of the pair.
    const fileDuplicates = duplicates.filter(
      (dup) => dup.file1 === file || dup.file2 === file
    );
    const issues = fileDuplicates.map((dup) => ({
      type: "duplicate-pattern",
      // Only two severities here: >0.95 critical, otherwise major.
      severity: dup.similarity > 0.95 ? "critical" : "major",
      message: `Similar pattern found in ${dup.file1 === file ? dup.file2 : dup.file1}`,
      location: {
        file,
        line: dup.file1 === file ? dup.line1 : dup.line2
      },
      suggestion: "Consider extracting common logic into a shared utility"
    }));
    results.push({
      fileName: file,
      issues,
      metrics: {
        // Each duplicate docks 10% consistency; NOTE: not floored at 0,
        // so 11+ duplicates yield a negative score.
        consistencyScore: 1 - fileDuplicates.length * 0.1
      }
    });
  }
  return results;
}
|
|
110
|
-
|
|
111
|
-
export {
|
|
112
|
-
detectDuplicatePatterns,
|
|
113
|
-
analyzePatterns
|
|
114
|
-
};
|