@aiready/pattern-detect 0.7.0 → 0.7.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-VRMXVYDZ.mjs +419 -0
- package/dist/cli.js +2 -19
- package/dist/cli.mjs +2 -20
- package/dist/index.d.mts +5 -1
- package/dist/index.d.ts +5 -1
- package/dist/index.js +1 -1
- package/dist/index.mjs +1 -1
- package/package.json +1 -1
package/dist/chunk-VRMXVYDZ.mjs
ADDED
@@ -0,0 +1,419 @@
+// src/index.ts
+import { scanFiles, readFileContent } from "@aiready/core";
+
+// src/detector.ts
+import { estimateTokens } from "@aiready/core";
+function categorizePattern(code) {
+  const lower = code.toLowerCase();
+  if (lower.includes("request") && lower.includes("response") || lower.includes("router.") || lower.includes("app.get") || lower.includes("app.post") || lower.includes("express") || lower.includes("ctx.body")) {
+    return "api-handler";
+  }
+  if (lower.includes("validate") || lower.includes("schema") || lower.includes("zod") || lower.includes("yup") || lower.includes("if") && lower.includes("throw")) {
+    return "validator";
+  }
+  if (lower.includes("return (") || lower.includes("jsx") || lower.includes("component") || lower.includes("props")) {
+    return "component";
+  }
+  if (lower.includes("class ") || lower.includes("this.")) {
+    return "class-method";
+  }
+  if (lower.includes("return ") && !lower.includes("this") && !lower.includes("new ")) {
+    return "utility";
+  }
+  if (lower.includes("function") || lower.includes("=>")) {
+    return "function";
+  }
+  return "unknown";
+}
+function extractCodeBlocks(content, minLines) {
+  const lines = content.split("\n");
+  const blocks = [];
+  let currentBlock = [];
+  let blockStart = 0;
+  let braceDepth = 0;
+  let inFunction = false;
+  for (let i = 0; i < lines.length; i++) {
+    const line = lines[i];
+    const trimmed = line.trim();
+    if (!inFunction && (trimmed.includes("function ") || trimmed.includes("=>") || trimmed.includes("async ") || /^(export\s+)?(async\s+)?function\s+/.test(trimmed) || /^(export\s+)?const\s+\w+\s*=\s*(async\s*)?\(/.test(trimmed))) {
+      inFunction = true;
+      blockStart = i;
+    }
+    for (const char of line) {
+      if (char === "{") braceDepth++;
+      if (char === "}") braceDepth--;
+    }
+    if (inFunction) {
+      currentBlock.push(line);
+    }
+    if (inFunction && braceDepth === 0 && currentBlock.length >= minLines) {
+      const blockContent = currentBlock.join("\n");
+      const linesOfCode = currentBlock.filter(
+        (l) => l.trim() && !l.trim().startsWith("//")
+      ).length;
+      blocks.push({
+        content: blockContent,
+        startLine: blockStart + 1,
+        endLine: i + 1,
+        patternType: categorizePattern(blockContent),
+        linesOfCode
+      });
+      currentBlock = [];
+      inFunction = false;
+    } else if (inFunction && braceDepth === 0) {
+      currentBlock = [];
+      inFunction = false;
+    }
+  }
+  return blocks;
+}
+function normalizeCode(code) {
+  return code.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "").replace(/"[^"]*"/g, '"STR"').replace(/'[^']*'/g, "'STR'").replace(/`[^`]*`/g, "`STR`").replace(/\b\d+\b/g, "NUM").replace(/\s+/g, " ").trim();
+}
+function jaccardSimilarity(tokens1, tokens2) {
+  const set1 = new Set(tokens1);
+  const set2 = new Set(tokens2);
+  let intersection = 0;
+  for (const token of set1) {
+    if (set2.has(token)) intersection++;
+  }
+  const union = set1.size + set2.size - intersection;
+  return union === 0 ? 0 : intersection / union;
+}
+async function detectDuplicatePatterns(files, options) {
+  const {
+    minSimilarity,
+    minLines,
+    batchSize = 100,
+    approx = true,
+    minSharedTokens = 8,
+    maxCandidatesPerBlock = 100,
+    streamResults = false
+  } = options;
+  const duplicates = [];
+  const maxComparisons = approx ? Infinity : 5e5;
+  const allBlocks = files.flatMap(
+    (file) => extractCodeBlocks(file.content, minLines).map((block) => ({
+      content: block.content,
+      startLine: block.startLine,
+      endLine: block.endLine,
+      file: file.file,
+      normalized: normalizeCode(block.content),
+      patternType: block.patternType,
+      tokenCost: estimateTokens(block.content),
+      linesOfCode: block.linesOfCode
+    }))
+  );
+  console.log(`Extracted ${allBlocks.length} code blocks for analysis`);
+  if (!approx && allBlocks.length > 500) {
+    console.log(`\u26A0\uFE0F Using --no-approx mode with ${allBlocks.length} blocks may be slow (O(B\xB2) complexity).`);
+    console.log(` Consider using approximate mode (default) for better performance.`);
+  }
+  const stopwords = /* @__PURE__ */ new Set([
+    "return",
+    "const",
+    "let",
+    "var",
+    "function",
+    "class",
+    "new",
+    "if",
+    "else",
+    "for",
+    "while",
+    "async",
+    "await",
+    "try",
+    "catch",
+    "switch",
+    "case",
+    "default",
+    "import",
+    "export",
+    "from",
+    "true",
+    "false",
+    "null",
+    "undefined",
+    "this"
+  ]);
+  const tokenize = (norm) => norm.split(/[\s(){}\[\];,\.]+/).filter((t) => t && t.length >= 3 && !stopwords.has(t.toLowerCase()));
+  const blockTokens = allBlocks.map((b) => tokenize(b.normalized));
+  const invertedIndex = /* @__PURE__ */ new Map();
+  if (approx) {
+    for (let i = 0; i < blockTokens.length; i++) {
+      for (const tok of blockTokens[i]) {
+        let arr = invertedIndex.get(tok);
+        if (!arr) {
+          arr = [];
+          invertedIndex.set(tok, arr);
+        }
+        arr.push(i);
+      }
+    }
+  }
+  const totalComparisons = approx ? void 0 : allBlocks.length * (allBlocks.length - 1) / 2;
+  if (totalComparisons !== void 0) {
+    console.log(`Processing ${totalComparisons.toLocaleString()} comparisons in batches...`);
+  } else {
+    console.log(`Using approximate candidate selection to reduce comparisons...`);
+  }
+  let comparisonsProcessed = 0;
+  let comparisonsBudgetExhausted = false;
+  const startTime = Date.now();
+  for (let i = 0; i < allBlocks.length; i++) {
+    if (maxComparisons && comparisonsProcessed >= maxComparisons) {
+      comparisonsBudgetExhausted = true;
+      break;
+    }
+    if (i % batchSize === 0 && i > 0) {
+      const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
+      const duplicatesFound = duplicates.length;
+      if (totalComparisons !== void 0) {
+        const progress = (comparisonsProcessed / totalComparisons * 100).toFixed(1);
+        const remaining = totalComparisons - comparisonsProcessed;
+        const rate = comparisonsProcessed / parseFloat(elapsed);
+        const eta = remaining > 0 ? (remaining / rate).toFixed(0) : 0;
+        console.log(` ${progress}% (${comparisonsProcessed.toLocaleString()}/${totalComparisons.toLocaleString()} comparisons, ${elapsed}s elapsed, ~${eta}s remaining, ${duplicatesFound} duplicates)`);
+      } else {
+        console.log(` Processed ${i.toLocaleString()}/${allBlocks.length} blocks (${elapsed}s elapsed, ${duplicatesFound} duplicates)`);
+      }
+      await new Promise((resolve) => setImmediate(resolve));
+    }
+    const block1 = allBlocks[i];
+    let candidates = null;
+    if (approx) {
+      const counts = /* @__PURE__ */ new Map();
+      for (const tok of blockTokens[i]) {
+        const ids = invertedIndex.get(tok);
+        if (!ids) continue;
+        for (const j of ids) {
+          if (j <= i) continue;
+          if (allBlocks[j].file === block1.file) continue;
+          counts.set(j, (counts.get(j) || 0) + 1);
+        }
+      }
+      candidates = Array.from(counts.entries()).filter(([, shared]) => shared >= minSharedTokens).sort((a, b) => b[1] - a[1]).slice(0, maxCandidatesPerBlock).map(([j, shared]) => ({ j, shared }));
+    }
+    if (approx && candidates) {
+      for (const { j } of candidates) {
+        if (!approx && maxComparisons !== Infinity && comparisonsProcessed >= maxComparisons) {
+          console.log(`\u26A0\uFE0F Comparison safety limit reached (${maxComparisons.toLocaleString()} comparisons in --no-approx mode).`);
+          console.log(` This prevents excessive runtime on large repos. Consider using approximate mode (default) or --min-lines to reduce blocks.`);
+          break;
+        }
+        comparisonsProcessed++;
+        const block2 = allBlocks[j];
+        const similarity = jaccardSimilarity(blockTokens[i], blockTokens[j]);
+        if (similarity >= minSimilarity) {
+          const duplicate = {
+            file1: block1.file,
+            file2: block2.file,
+            line1: block1.startLine,
+            line2: block2.startLine,
+            endLine1: block1.endLine,
+            endLine2: block2.endLine,
+            similarity,
+            snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
+            patternType: block1.patternType,
+            tokenCost: block1.tokenCost + block2.tokenCost,
+            linesOfCode: block1.linesOfCode
+          };
+          duplicates.push(duplicate);
+          if (streamResults) {
+            console.log(`
+\u2705 Found: ${duplicate.patternType} ${Math.round(similarity * 100)}% similar`);
+            console.log(` ${duplicate.file1}:${duplicate.line1}-${duplicate.endLine1} \u21D4 ${duplicate.file2}:${duplicate.line2}-${duplicate.endLine2}`);
+            console.log(` Token cost: ${duplicate.tokenCost.toLocaleString()}`);
+          }
+        }
+      }
+    } else {
+      for (let j = i + 1; j < allBlocks.length; j++) {
+        if (maxComparisons && comparisonsProcessed >= maxComparisons) break;
+        comparisonsProcessed++;
+        const block2 = allBlocks[j];
+        if (block1.file === block2.file) continue;
+        const similarity = jaccardSimilarity(blockTokens[i], blockTokens[j]);
+        if (similarity >= minSimilarity) {
+          const duplicate = {
+            file1: block1.file,
+            file2: block2.file,
+            line1: block1.startLine,
+            line2: block2.startLine,
+            endLine1: block1.endLine,
+            endLine2: block2.endLine,
+            similarity,
+            snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
+            patternType: block1.patternType,
+            tokenCost: block1.tokenCost + block2.tokenCost,
+            linesOfCode: block1.linesOfCode
+          };
+          duplicates.push(duplicate);
+          if (streamResults) {
+            console.log(`
+\u2705 Found: ${duplicate.patternType} ${Math.round(similarity * 100)}% similar`);
+            console.log(` ${duplicate.file1}:${duplicate.line1}-${duplicate.endLine1} \u21D4 ${duplicate.file2}:${duplicate.line2}-${duplicate.endLine2}`);
+            console.log(` Token cost: ${duplicate.tokenCost.toLocaleString()}`);
+          }
+        }
+      }
+    }
+  }
+  if (comparisonsBudgetExhausted) {
+    console.log(`\u26A0\uFE0F Comparison budget exhausted (${maxComparisons.toLocaleString()} comparisons). Use --max-comparisons to increase.`);
+  }
+  return duplicates.sort(
+    (a, b) => b.similarity - a.similarity || b.tokenCost - a.tokenCost
+  );
+}
+
+// src/index.ts
+function getRefactoringSuggestion(patternType, similarity) {
+  const baseMessages = {
+    "api-handler": "Extract common middleware or create a base handler class",
+    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
+    utility: "Move to a shared utilities file and reuse across modules",
+    "class-method": "Consider inheritance or composition to share behavior",
+    component: "Extract shared logic into a custom hook or HOC",
+    function: "Extract into a shared helper function",
+    unknown: "Extract common logic into a reusable module"
+  };
+  const urgency = similarity > 0.95 ? " (CRITICAL: Nearly identical code)" : similarity > 0.9 ? " (HIGH: Very similar, refactor soon)" : "";
+  return baseMessages[patternType] + urgency;
+}
+async function analyzePatterns(options) {
+  const {
+    minSimilarity = 0.4,
+    // Jaccard similarity default (40% threshold)
+    minLines = 5,
+    batchSize = 100,
+    approx = true,
+    minSharedTokens = 8,
+    maxCandidatesPerBlock = 100,
+    streamResults = false,
+    severity = "all",
+    includeTests = false,
+    ...scanOptions
+  } = options;
+  const files = await scanFiles(scanOptions);
+  const results = [];
+  const fileContents = await Promise.all(
+    files.map(async (file) => ({
+      file,
+      content: await readFileContent(file)
+    }))
+  );
+  const duplicates = await detectDuplicatePatterns(fileContents, {
+    minSimilarity,
+    minLines,
+    batchSize,
+    approx,
+    minSharedTokens,
+    maxCandidatesPerBlock,
+    streamResults
+  });
+  for (const file of files) {
+    const fileDuplicates = duplicates.filter(
+      (dup) => dup.file1 === file || dup.file2 === file
+    );
+    const issues = fileDuplicates.map((dup) => {
+      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
+      const severity2 = dup.similarity > 0.95 ? "critical" : dup.similarity > 0.9 ? "major" : "minor";
+      return {
+        type: "duplicate-pattern",
+        severity: severity2,
+        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
+        location: {
+          file,
+          line: dup.file1 === file ? dup.line1 : dup.line2
+        },
+        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
+      };
+    });
+    let filteredIssues = issues;
+    if (severity !== "all") {
+      const severityMap = {
+        critical: ["critical"],
+        high: ["critical", "major"],
+        medium: ["critical", "major", "minor"]
+      };
+      const allowedSeverities = severityMap[severity] || ["critical", "major", "minor"];
+      filteredIssues = issues.filter((issue) => allowedSeverities.includes(issue.severity));
+    }
+    const totalTokenCost = fileDuplicates.reduce(
+      (sum, dup) => sum + dup.tokenCost,
+      0
+    );
+    results.push({
+      fileName: file,
+      issues: filteredIssues,
+      metrics: {
+        tokenCost: totalTokenCost,
+        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
+      }
+    });
+  }
+  return { results, duplicates, files };
+}
+function generateSummary(results) {
+  const allIssues = results.flatMap((r) => r.issues);
+  const totalTokenCost = results.reduce(
+    (sum, r) => sum + (r.metrics.tokenCost || 0),
+    0
+  );
+  const patternsByType = {
+    "api-handler": 0,
+    validator: 0,
+    utility: 0,
+    "class-method": 0,
+    component: 0,
+    function: 0,
+    unknown: 0
+  };
+  allIssues.forEach((issue) => {
+    const match = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
+    if (match) {
+      const type = match[1];
+      patternsByType[type] = (patternsByType[type] || 0) + 1;
+    }
+  });
+  const topDuplicates = allIssues.slice(0, 10).map((issue) => {
+    const similarityMatch = issue.message.match(/(\d+)% similar/);
+    const tokenMatch = issue.message.match(/\((\d+) tokens/);
+    const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
+    const fileMatch = issue.message.match(/similar to (.+?) \(/);
+    return {
+      files: [
+        {
+          path: issue.location.file,
+          startLine: issue.location.line,
+          endLine: 0
+          // Not available from Issue
+        },
+        {
+          path: fileMatch?.[1] || "unknown",
+          startLine: 0,
+          // Not available from Issue
+          endLine: 0
+          // Not available from Issue
+        }
+      ],
+      similarity: similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0,
+      patternType: typeMatch?.[1] || "unknown",
+      tokenCost: tokenMatch ? parseInt(tokenMatch[1]) : 0
+    };
+  });
+  return {
+    totalPatterns: allIssues.length,
+    totalTokenCost,
+    patternsByType,
+    topDuplicates
+  };
+}
+
+export {
+  detectDuplicatePatterns,
+  analyzePatterns,
+  generateSummary
+};
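Note: the sketch below is illustrative and not part of the published package. It exercises the detectDuplicatePatterns export shown above with in-memory file contents; it assumes the package's public entry re-exports the function (it appears in the chunk's export list), and the sample files and option values are made up.

```js
// Illustrative only: drives detectDuplicatePatterns as bundled above.
// Assumes @aiready/pattern-detect re-exports it from its public entry.
import { detectDuplicatePatterns } from "@aiready/pattern-detect";

// The function expects { file, content } pairs, not paths. Real usage would
// pass whole source files; these tiny samples only show the input shape.
const fileContents = [
  { file: "src/a.js", content: "export function add(a, b) {\n  return a + b;\n}" },
  { file: "src/b.js", content: "export function sum(x, y) {\n  return x + y;\n}" },
];

const duplicates = await detectDuplicatePatterns(fileContents, {
  minSimilarity: 0.4,         // Jaccard threshold over normalized, filtered tokens
  minLines: 3,                // minimum block size kept by extractCodeBlocks
  approx: true,               // inverted-index candidate selection (the default)
  minSharedTokens: 8,         // candidate pairs must share at least this many tokens
  maxCandidatesPerBlock: 100,
  streamResults: false,
});

for (const dup of duplicates) {
  console.log(
    `${dup.patternType} ${Math.round(dup.similarity * 100)}% similar:`,
    `${dup.file1}:${dup.line1}-${dup.endLine1} <-> ${dup.file2}:${dup.line2}-${dup.endLine2}`,
    `(~${dup.tokenCost} tokens)`
  );
}
```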
package/dist/cli.js
CHANGED
@@ -382,7 +382,7 @@ async function analyzePatterns(options) {
       }
     });
   }
-  return results;
+  return { results, duplicates, files };
 }
 function generateSummary(results) {
   const allIssues = results.flatMap((r) => r.issues);
@@ -496,24 +496,7 @@ program.name("aiready-patterns").description("Detect duplicate patterns in your
     ];
     finalOptions.exclude = finalOptions.exclude ? [...finalOptions.exclude, ...testPatterns] : testPatterns;
   }
-  const results = await analyzePatterns(finalOptions);
-  const { scanFiles: scanFiles2, readFileContent: readFileContent2 } = await import("@aiready/core");
-  const files = await scanFiles2(finalOptions);
-  const fileContents = await Promise.all(
-    files.map(async (file) => ({
-      file,
-      content: await readFileContent2(file)
-    }))
-  );
-  const rawDuplicates = await detectDuplicatePatterns(fileContents, {
-    minSimilarity: finalOptions.minSimilarity,
-    minLines: finalOptions.minLines,
-    batchSize: finalOptions.batchSize,
-    approx: finalOptions.approx,
-    minSharedTokens: finalOptions.minSharedTokens,
-    maxCandidatesPerBlock: finalOptions.maxCandidatesPerBlock,
-    streamResults: finalOptions.streamResults
-  });
+  const { results, duplicates: rawDuplicates, files } = await analyzePatterns(finalOptions);
   const elapsedTime = ((Date.now() - startTime) / 1e3).toFixed(2);
   const summary = generateSummary(results);
   const totalIssues = results.reduce((sum, r) => sum + r.issues.length, 0);
package/dist/cli.mjs
CHANGED
@@ -1,9 +1,8 @@
 #!/usr/bin/env node
 import {
   analyzePatterns,
-  detectDuplicatePatterns,
   generateSummary
-} from "./chunk-
+} from "./chunk-VRMXVYDZ.mjs";
 
 // src/cli.ts
 import { Command } from "commander";
@@ -62,24 +61,7 @@ program.name("aiready-patterns").description("Detect duplicate patterns in your
     ];
     finalOptions.exclude = finalOptions.exclude ? [...finalOptions.exclude, ...testPatterns] : testPatterns;
   }
-  const results = await analyzePatterns(finalOptions);
-  const { scanFiles, readFileContent } = await import("@aiready/core");
-  const files = await scanFiles(finalOptions);
-  const fileContents = await Promise.all(
-    files.map(async (file) => ({
-      file,
-      content: await readFileContent(file)
-    }))
-  );
-  const rawDuplicates = await detectDuplicatePatterns(fileContents, {
-    minSimilarity: finalOptions.minSimilarity,
-    minLines: finalOptions.minLines,
-    batchSize: finalOptions.batchSize,
-    approx: finalOptions.approx,
-    minSharedTokens: finalOptions.minSharedTokens,
-    maxCandidatesPerBlock: finalOptions.maxCandidatesPerBlock,
-    streamResults: finalOptions.streamResults
-  });
+  const { results, duplicates: rawDuplicates, files } = await analyzePatterns(finalOptions);
   const elapsedTime = ((Date.now() - startTime) / 1e3).toFixed(2);
   const summary = generateSummary(results);
   const totalIssues = results.reduce((sum, r) => sum + r.issues.length, 0);
package/dist/index.d.mts
CHANGED
@@ -60,7 +60,11 @@ interface PatternSummary {
         tokenCost: number;
     }>;
 }
-declare function analyzePatterns(options: PatternDetectOptions): Promise<
+declare function analyzePatterns(options: PatternDetectOptions): Promise<{
+    results: AnalysisResult[];
+    duplicates: DuplicatePattern[];
+    files: string[];
+}>;
 /**
  * Generate a summary of pattern analysis
  */
package/dist/index.d.ts
CHANGED
@@ -60,7 +60,11 @@ interface PatternSummary {
         tokenCost: number;
     }>;
 }
-declare function analyzePatterns(options: PatternDetectOptions): Promise<
+declare function analyzePatterns(options: PatternDetectOptions): Promise<{
+    results: AnalysisResult[];
+    duplicates: DuplicatePattern[];
+    files: string[];
+}>;
 /**
  * Generate a summary of pattern analysis
  */
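The declaration change above matches the runtime change in cli.js/cli.mjs: analyzePatterns used to resolve to the per-file results array alone, and in 0.7.3 it resolves to an object. A minimal, hypothetical migration sketch for callers (option values are illustrative):

```js
// Hypothetical caller code, not taken from the package.
import { analyzePatterns, generateSummary } from "@aiready/pattern-detect";

const options = { minSimilarity: 0.4, minLines: 5 }; // plus whatever scan options @aiready/core expects

// 0.7.0: const results = await analyzePatterns(options);
// 0.7.3: destructure the richer return value.
const { results, duplicates, files } = await analyzePatterns(options);

const summary = generateSummary(results);
console.log(`${files.length} files, ${duplicates.length} duplicate pairs, ~${summary.totalTokenCost} tokens duplicated`);
```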
package/dist/index.js
CHANGED
package/dist/index.mjs
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aiready/pattern-detect",
-  "version": "0.7.
+  "version": "0.7.3",
   "description": "Semantic duplicate pattern detection for AI-generated code - finds similar implementations that waste AI context tokens",
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",