@aiready/pattern-detect 0.16.14 → 0.16.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analyzer-entry-BVz-HnZd.d.mts +119 -0
- package/dist/analyzer-entry-BwuoiCNm.d.ts +119 -0
- package/dist/analyzer-entry.d.mts +3 -0
- package/dist/analyzer-entry.d.ts +3 -0
- package/dist/analyzer-entry.js +810 -0
- package/dist/analyzer-entry.mjs +12 -0
- package/dist/chunk-I6ETJC7L.mjs +179 -0
- package/dist/chunk-THF4RW63.mjs +254 -0
- package/dist/chunk-UB3CGOQ7.mjs +64 -0
- package/dist/chunk-WBBO35SC.mjs +112 -0
- package/dist/chunk-WMOGJFME.mjs +391 -0
- package/dist/cli.js +37 -76
- package/dist/cli.mjs +52 -79
- package/dist/context-rules-entry-y2uJSngh.d.mts +60 -0
- package/dist/context-rules-entry-y2uJSngh.d.ts +60 -0
- package/dist/context-rules-entry.d.mts +2 -0
- package/dist/context-rules-entry.d.ts +2 -0
- package/dist/context-rules-entry.js +207 -0
- package/dist/context-rules-entry.mjs +12 -0
- package/dist/detector-entry.d.mts +14 -0
- package/dist/detector-entry.d.ts +14 -0
- package/dist/detector-entry.js +418 -0
- package/dist/detector-entry.mjs +7 -0
- package/dist/index.d.mts +7 -235
- package/dist/index.d.ts +7 -235
- package/dist/index.mjs +17 -9
- package/dist/scoring-entry.d.mts +23 -0
- package/dist/scoring-entry.d.ts +23 -0
- package/dist/scoring-entry.js +133 -0
- package/dist/scoring-entry.mjs +6 -0
- package/dist/types-DU2mmhwb.d.mts +36 -0
- package/dist/types-DU2mmhwb.d.ts +36 -0
- package/package.json +24 -4
|
@@ -0,0 +1,810 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/analyzer-entry.ts
|
|
31
|
+
var analyzer_entry_exports = {};
|
|
32
|
+
__export(analyzer_entry_exports, {
|
|
33
|
+
analyzePatterns: () => analyzePatterns,
|
|
34
|
+
generateSummary: () => generateSummary,
|
|
35
|
+
getSmartDefaults: () => getSmartDefaults
|
|
36
|
+
});
|
|
37
|
+
module.exports = __toCommonJS(analyzer_entry_exports);
|
|
38
|
+
|
|
39
|
+
// src/analyzer.ts
|
|
40
|
+
var import_core4 = require("@aiready/core");
|
|
41
|
+
|
|
42
|
+
// src/detector.ts
|
|
43
|
+
var import_core2 = require("@aiready/core");
|
|
44
|
+
|
|
45
|
+
// src/context-rules.ts
|
|
46
|
+
var import_core = require("@aiready/core");
|
|
47
|
+
// Context-aware rules that reclassify duplication findings based on where the
// code lives. Each rule provides:
//   - name:       stable identifier, surfaced as `matchedRule` in results
//   - detect:     (filePath, code) => boolean predicate
//   - severity:   downgraded severity to apply when the rule matches
//   - reason / suggestion: human-readable explanation for the report
// Rules are evaluated in array order and the FIRST match wins
// (see calculateSeverity), so more specific rules should come first.
var CONTEXT_RULES = [
  // Test Fixtures - Intentional duplication for test isolation
  {
    name: "test-fixtures",
    // Matches test files (by common naming/dir conventions) that also use
    // setup/teardown hooks — fixture duplication is usually deliberate.
    detect: (file, code) => {
      const isTestFile = file.includes(".test.") || file.includes(".spec.") || file.includes("__tests__") || file.includes("/test/") || file.includes("/tests/");
      const hasTestFixtures = code.includes("beforeAll") || code.includes("afterAll") || code.includes("beforeEach") || code.includes("afterEach") || code.includes("setUp") || code.includes("tearDown");
      return isTestFile && hasTestFixtures;
    },
    severity: import_core.Severity.Info,
    reason: "Test fixture duplication is intentional for test isolation",
    suggestion: "Consider if shared test setup would improve maintainability without coupling tests"
  },
  // Email/Document Templates - Often intentionally similar for consistency
  {
    name: "templates",
    // Template dirs/filenames combined with template-ish content markers.
    detect: (file, code) => {
      const isTemplate = file.includes("/templates/") || file.includes("-template") || file.includes("/email-templates/") || file.includes("/emails/");
      const hasTemplateContent = (code.includes("return") || code.includes("export")) && (code.includes("html") || code.includes("subject") || code.includes("body"));
      return isTemplate && hasTemplateContent;
    },
    severity: import_core.Severity.Minor,
    reason: "Template duplication may be intentional for maintainability and branding consistency",
    suggestion: "Extract shared structure only if templates become hard to maintain"
  },
  // E2E/Integration Test Page Objects - Test independence
  {
    name: "e2e-page-objects",
    // E2E framework directories plus browser-automation API usage.
    detect: (file, code) => {
      const isE2ETest = file.includes("e2e/") || file.includes("/e2e/") || file.includes(".e2e.") || file.includes("/playwright/") || file.includes("playwright/") || file.includes("/cypress/") || file.includes("cypress/") || file.includes("/integration/") || file.includes("integration/");
      const hasPageObjectPatterns = code.includes("page.") || code.includes("await page") || code.includes("locator") || code.includes("getBy") || code.includes("selector") || code.includes("click(") || code.includes("fill(");
      return isE2ETest && hasPageObjectPatterns;
    },
    severity: import_core.Severity.Minor,
    reason: "E2E test duplication ensures test independence and reduces coupling",
    suggestion: "Consider page object pattern only if duplication causes maintenance issues"
  },
  // Configuration Files - Often necessarily similar by design
  {
    name: "config-files",
    // Filename-only check; config contents are expected to look alike.
    detect: (file) => {
      return file.endsWith(".config.ts") || file.endsWith(".config.js") || file.includes("jest.config") || file.includes("vite.config") || file.includes("webpack.config") || file.includes("rollup.config") || file.includes("tsconfig");
    },
    severity: import_core.Severity.Minor,
    reason: "Configuration files often have similar structure by design",
    suggestion: "Consider shared config base only if configurations become hard to maintain"
  },
  // Type Definitions - Duplication for type safety and module independence
  {
    name: "type-definitions",
    detect: (file, code) => {
      const isTypeFile = file.endsWith(".d.ts") || file.includes("/types/");
      const hasTypeDefinitions = code.includes("interface ") || code.includes("type ") || code.includes("enum ");
      return isTypeFile && hasTypeDefinitions;
    },
    severity: import_core.Severity.Info,
    reason: "Type duplication may be intentional for module independence and type safety",
    suggestion: "Extract to shared types package only if causing maintenance burden"
  },
  // Migration Scripts - One-off scripts that are similar by nature
  {
    name: "migration-scripts",
    detect: (file) => {
      return file.includes("/migrations/") || file.includes("/migrate/") || file.includes(".migration.");
    },
    severity: import_core.Severity.Info,
    reason: "Migration scripts are typically one-off and intentionally similar",
    suggestion: "Duplication is acceptable for migration scripts"
  },
  // Mock Data - Test data intentionally duplicated
  {
    name: "mock-data",
    detect: (file, code) => {
      const isMockFile = file.includes("/mocks/") || file.includes("/__mocks__/") || file.includes("/fixtures/") || file.includes(".mock.") || file.includes(".fixture.");
      const hasMockData = code.includes("mock") || code.includes("Mock") || code.includes("fixture") || code.includes("stub") || code.includes("export const");
      return isMockFile && hasMockData;
    },
    severity: import_core.Severity.Info,
    reason: "Mock data duplication is expected for comprehensive test coverage",
    suggestion: "Consider shared factories only for complex mock generation"
  },
  // Tool Implementations - Structural Boilerplate
  {
    name: "tool-implementations",
    detect: (file, code) => {
      const isToolFile = file.includes("/tools/") || file.endsWith(".tool.ts") || code.includes("toolDefinitions");
      const hasToolStructure = code.includes("execute") && (code.includes("try") || code.includes("catch"));
      return isToolFile && hasToolStructure;
    },
    severity: import_core.Severity.Info,
    reason: "Tool implementations share structural boilerplate but have distinct business logic",
    suggestion: "Tool duplication is acceptable for boilerplate interface wrappers"
  }
];
|
|
141
|
+
/**
 * Decide how severe a detected duplication is.
 *
 * Context rules are consulted first (a match on EITHER file wins and carries
 * its own downgraded severity); otherwise severity falls out of the raw
 * similarity score and block size.
 *
 * @param {string} file1 - path of the first file in the pair
 * @param {string} file2 - path of the second file in the pair
 * @param {string} code - code of the first block (used by context rules)
 * @param {number} similarity - Jaccard similarity in [0, 1]
 * @param {number} linesOfCode - size of the duplicated block
 * @returns {{severity: *, reason: string, suggestion: string, matchedRule?: string}}
 */
function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
  // First matching context rule wins (rules are ordered most-specific first).
  const matched = CONTEXT_RULES.find(
    (rule) => rule.detect(file1, code) || rule.detect(file2, code)
  );
  if (matched) {
    return {
      severity: matched.severity,
      reason: matched.reason,
      suggestion: matched.suggestion,
      matchedRule: matched.name
    };
  }
  // No contextual excuse: grade purely on similarity and block size.
  if (similarity >= 0.95) {
    if (linesOfCode >= 30) {
      return {
        severity: import_core.Severity.Critical,
        reason: "Large nearly-identical code blocks waste tokens and create maintenance burden",
        suggestion: "Extract to shared utility module immediately"
      };
    }
    if (linesOfCode >= 15) {
      return {
        severity: import_core.Severity.Major,
        reason: "Nearly identical code should be consolidated",
        suggestion: "Move to shared utility file"
      };
    }
    // Short near-identical blocks fall through to the 0.85 tier below.
  }
  if (similarity >= 0.85) {
    return {
      severity: import_core.Severity.Major,
      reason: "High similarity indicates significant duplication",
      suggestion: "Extract common logic to shared function"
    };
  }
  if (similarity >= 0.7) {
    return {
      severity: import_core.Severity.Minor,
      reason: "Moderate similarity detected",
      suggestion: "Consider extracting shared patterns if code evolves together"
    };
  }
  return {
    severity: import_core.Severity.Minor,
    reason: "Minor similarity detected",
    suggestion: "Monitor but refactoring may not be worthwhile"
  };
}
|
|
184
|
+
|
|
185
|
+
// src/detector.ts
|
|
186
|
+
/**
 * Canonicalize a code snippet for similarity comparison.
 *
 * Strips comments (language-aware: `#` for Python, `//` and block comments
 * otherwise), unifies quote characters to `"`, collapses whitespace runs to
 * single spaces, trims, and lowercases — so purely cosmetic differences do
 * not reduce the similarity score.
 *
 * @param {string} code - raw source snippet
 * @param {boolean} [isPython=false] - use Python comment syntax
 * @returns {string} the normalized snippet
 */
function normalizeCode(code, isPython = false) {
  const withoutComments = isPython
    ? code.replace(/#.*/g, "")
    : code.replace(/\/\/.*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
  return withoutComments
    .replace(/['"`]/g, '"')
    .replace(/\s+/g, " ")
    .trim()
    .toLowerCase();
}
|
|
195
|
+
// Extract named top-level code blocks (functions, classes, schema consts,
// Express-style route handlers) from a source file. Uses a declaration regex
// to find block starts, then brace counting to find each block's end.
// Returns { file, startLine, endLine, code, tokens, patternType } records
// with 1-based line numbers.
function extractBlocks(file, content) {
  const isPython = file.toLowerCase().endsWith(".py");
  if (isPython) {
    // Python has no braces; use the indentation-based extractor instead.
    return extractBlocksPython(file, content);
  }
  const blocks = [];
  const lines = content.split("\n");
  // Three alternatives: (1) modifier* + keyword/type + identifier declarations,
  // (2) `const <name> = <lib>.object(` schema definitions, (3) `app.<verb>(`
  // route registrations.
  // NOTE(review): in alternative (1), `[a-zA-Z0-9_<>[]]+` contains an
  // unescaped `]`, so the character class ends at that `]` and the final `]+`
  // matches literal `]` characters — this looks like it was meant to be
  // `[a-zA-Z0-9_<>[\]]+`; confirm against the original TypeScript source.
  const blockRegex = /^\s*(?:export\s+)?(?:async\s+)?(?:public\s+|private\s+|protected\s+|internal\s+|static\s+|readonly\s+|virtual\s+|abstract\s+|override\s+)*(function|class|interface|type|enum|record|struct|void|func|[a-zA-Z0-9_<>[]]+)\s+([a-zA-Z0-9_]+)(?:\s*\(|(?:\s+extends|\s+implements|\s+where)?\s*\{)|^\s*(?:export\s+)?const\s+([a-zA-Z0-9_]+)\s*=\s*[a-zA-Z0-9_.]+\.object\(|^\s*(app\.(?:get|post|put|delete|patch|use))\(/gm;
  let match;
  while ((match = blockRegex.exec(content)) !== null) {
    // 1-based line number of the match offset.
    const startLine = content.substring(0, match.index).split("\n").length;
    let type;
    let name;
    if (match[1]) {
      // Alternative (1): declaration keyword/type + identifier.
      type = match[1];
      name = match[2];
    } else if (match[3]) {
      // Alternative (2): schema-object const.
      type = "const";
      name = match[3];
    } else {
      // Alternative (3): app.<verb>( route handler.
      type = "handler";
      name = match[4];
    }
    // Brace-count forward from the match to find the block's closing brace.
    // NOTE(review): braces inside string literals or comments are counted
    // too, which can mis-place the block end for such inputs.
    let endLine = -1;
    let openBraces = 0;
    let foundStart = false;
    for (let i = match.index; i < content.length; i++) {
      if (content[i] === "{") {
        openBraces++;
        foundStart = true;
      } else if (content[i] === "}") {
        openBraces--;
      }
      if (foundStart && openBraces === 0) {
        endLine = content.substring(0, i + 1).split("\n").length;
        break;
      }
    }
    if (endLine === -1) {
      // No balanced brace found: fall back to a single line, or to the end
      // of the file when the match is on the final line.
      const remaining = content.slice(match.index);
      const nextLineMatch = remaining.indexOf("\n");
      if (nextLineMatch !== -1) {
        endLine = startLine;
      } else {
        endLine = lines.length;
      }
    }
    // Guard against an end before the start (degenerate matches).
    endLine = Math.max(startLine, endLine);
    const blockCode = lines.slice(startLine - 1, endLine).join("\n");
    // Token estimate comes from @aiready/core; used later for cost reporting.
    const tokens = (0, import_core2.estimateTokens)(blockCode);
    blocks.push({
      file,
      startLine,
      endLine,
      code: blockCode,
      tokens,
      patternType: inferPatternType(type, name)
    });
  }
  return blocks;
}
|
|
256
|
+
// Extract `def`/`class` blocks from Python source using indentation:
// a block runs until the first subsequent non-blank line indented at or
// below the declaration line. Returns the same record shape as extractBlocks.
function extractBlocksPython(file, content) {
  const blocks = [];
  const lines = content.split("\n");
  // Optionally-async def/class declarations; captures keyword and name.
  const blockRegex = /^\s*(?:async\s+)?(def|class)\s+([a-zA-Z0-9_]+)/gm;
  let match;
  while ((match = blockRegex.exec(content)) !== null) {
    // 1-based line of the declaration; idx is the 0-based `lines` index.
    const startLinePos = content.substring(0, match.index).split("\n").length;
    const startLineIdx = startLinePos - 1;
    // Column of the first non-whitespace character on the declaration line.
    const initialIndent = lines[startLineIdx].search(/\S/);
    let endLineIdx = startLineIdx;
    for (let i = startLineIdx + 1; i < lines.length; i++) {
      const line = lines[i];
      if (line.trim().length === 0) {
        // Blank lines are provisionally part of the block; trailing ones
        // are trimmed off after the loop.
        endLineIdx = i;
        continue;
      }
      const currentIndent = line.search(/\S/);
      if (currentIndent <= initialIndent) {
        // Dedent to (or past) the declaration level ends the block.
        break;
      }
      endLineIdx = i;
    }
    // Trim trailing blank lines so the block ends on real code.
    while (endLineIdx > startLineIdx && lines[endLineIdx].trim().length === 0) {
      endLineIdx--;
    }
    const blockCode = lines.slice(startLineIdx, endLineIdx + 1).join("\n");
    // Token estimate comes from @aiready/core; used later for cost reporting.
    const tokens = (0, import_core2.estimateTokens)(blockCode);
    blocks.push({
      file,
      startLine: startLinePos,
      endLine: endLineIdx + 1,
      code: blockCode,
      tokens,
      patternType: inferPatternType(match[1], match[2])
    });
  }
  return blocks;
}
|
|
294
|
+
/**
 * Classify an extracted code block by its declaration keyword and name.
 *
 * Checks run in priority order: API handlers, validators, utilities,
 * class members, PascalCase components, plain functions, then "unknown".
 *
 * @param {string} keyword - declaration keyword/type captured by the block
 *   regex ("function", "class", "def", "handler", "const", ...)
 * @param {string} name - the declared identifier
 * @returns {string} one of "api-handler" | "validator" | "utility" |
 *   "class-method" | "component" | "function" | "unknown"
 */
function inferPatternType(keyword, name) {
  const n = name.toLowerCase();
  if (keyword === "handler" || n.includes("handler") || n.includes("controller") || n.startsWith("app.")) {
    return "api-handler";
  }
  if (n.includes("validate") || n.includes("schema")) return "validator";
  if (n.includes("util") || n.includes("helper")) return "utility";
  if (keyword === "class") return "class-method";
  // Bug fix: the original tested /^[A-Z]/ against the lowercased `n`, so the
  // "component" branch was unreachable. PascalCase must be checked on the
  // original name.
  if (/^[A-Z]/.test(name)) return "component";
  if (keyword === "function") return "function";
  return "unknown";
}
|
|
306
|
+
/**
 * Jaccard similarity between two normalized code strings, computed over the
 * sets of alphanumeric tokens: |A ∩ B| / |A ∪ B|.
 *
 * @param {string} a - first normalized snippet
 * @param {string} b - second normalized snippet
 * @returns {number} similarity in [0, 1]; 1 for identical strings, 0 when
 *   either side has no tokens
 */
function calculateSimilarity(a, b) {
  if (a === b) return 1;
  // Split on any run of non-alphanumerics; drop the empty fragments that
  // appear at string edges.
  const tokenize = (s) => s.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
  const tokensA = tokenize(a);
  const tokensB = tokenize(b);
  if (tokensA.length === 0 || tokensB.length === 0) return 0;
  const setA = new Set(tokensA);
  const setB = new Set(tokensB);
  // Count the overlap directly; the union size then follows by
  // inclusion-exclusion without building a third set.
  let shared = 0;
  for (const token of setA) {
    if (setB.has(token)) shared++;
  }
  const unionSize = setA.size + setB.size - shared;
  return shared / unionSize;
}
|
|
317
|
+
/**
 * Turn a raw similarity score into a confidence value, nudged by block size:
 * bigger blocks (by lines or tokens) are more trustworthy matches, tiny
 * blocks less so. Result is clamped to [0, 1].
 *
 * @param {number} similarity - Jaccard similarity in [0, 1]
 * @param {number} tokens - estimated token count of the block
 * @param {number} lines - line count of the block
 * @returns {number} confidence in [0, 1]
 */
function calculateConfidence(similarity, tokens, lines) {
  let score = similarity;
  if (lines > 20) score += 0.05;
  if (tokens > 200) score += 0.05;
  if (lines < 5) score -= 0.1;
  // Clamp into the valid range.
  return Math.min(1, Math.max(0, score));
}
|
|
324
|
+
// Compare every extracted code block against every other (O(n^2) pairwise)
// and return duplicate findings sorted by descending similarity.
// Yields to the event loop every 50 outer iterations and reports progress
// via the optional onProgress callback.
// NOTE: only minSimilarity, minLines, streamResults, onProgress,
// excludePatterns, confidenceThreshold and ignoreWhitelist are consumed
// here; other options callers pass (batchSize, approx, minSharedTokens,
// maxCandidatesPerBlock) are not used by this implementation.
async function detectDuplicatePatterns(fileContents, options) {
  const {
    minSimilarity,
    minLines,
    streamResults,
    onProgress,
    excludePatterns = [],
    confidenceThreshold = 0,
    ignoreWhitelist = []
  } = options;
  const allBlocks = [];
  // Exclusions are case-insensitive regexes tested against block CODE.
  const excludeRegexes = excludePatterns.map((p) => new RegExp(p, "i"));
  // Gather candidate blocks, dropping ones that are too short or excluded.
  for (const { file, content } of fileContents) {
    const blocks = extractBlocks(file, content);
    for (const b of blocks) {
      if (b.endLine - b.startLine + 1 < minLines) continue;
      const isExcluded = excludeRegexes.some((regex) => regex.test(b.code));
      if (isExcluded) continue;
      allBlocks.push(b);
    }
  }
  const duplicates = [];
  const totalBlocks = allBlocks.length;
  let comparisons = 0;
  // n*(n-1)/2 unordered pairs — used only for progress reporting.
  const totalComparisons = totalBlocks * (totalBlocks - 1) / 2;
  if (onProgress) {
    onProgress(
      0,
      totalComparisons,
      `Starting duplicate detection on ${totalBlocks} blocks...`
    );
  }
  for (let i = 0; i < allBlocks.length; i++) {
    // Periodically yield to the event loop so long scans don't block I/O.
    if (i % 50 === 0 && i > 0) {
      await new Promise((resolve) => setImmediate(resolve));
      if (onProgress) {
        onProgress(
          comparisons,
          totalComparisons,
          `Analyzing blocks (${i}/${totalBlocks})...`
        );
      }
    }
    const b1 = allBlocks[i];
    const isPython1 = b1.file.toLowerCase().endsWith(".py");
    // Normalize once per outer block; the inner block is normalized per pair.
    const norm1 = normalizeCode(b1.code, isPython1);
    for (let j = i + 1; j < allBlocks.length; j++) {
      comparisons++;
      const b2 = allBlocks[j];
      // Same-file duplication is out of scope for this detector.
      if (b1.file === b2.file) continue;
      // Whitelist entries match either a shared path fragment present in
      // BOTH files, or an explicit "file1::file2" pair (either order).
      const isWhitelisted = ignoreWhitelist.some((pattern) => {
        return b1.file.includes(pattern) && b2.file.includes(pattern) || pattern === `${b1.file}::${b2.file}` || pattern === `${b2.file}::${b1.file}`;
      });
      if (isWhitelisted) continue;
      const isPython2 = b2.file.toLowerCase().endsWith(".py");
      const norm2 = normalizeCode(b2.code, isPython2);
      const sim = calculateSimilarity(norm1, norm2);
      if (sim >= minSimilarity) {
        // Confidence is derived from the FIRST block's size only.
        const confidence = calculateConfidence(
          sim,
          b1.tokens,
          b1.endLine - b1.startLine + 1
        );
        if (confidence < confidenceThreshold) continue;
        const { severity, reason, suggestion, matchedRule } = calculateSeverity(
          b1.file,
          b2.file,
          b1.code,
          sim,
          b1.endLine - b1.startLine + 1
        );
        const dup = {
          file1: b1.file,
          line1: b1.startLine,
          endLine1: b1.endLine,
          file2: b2.file,
          line2: b2.startLine,
          endLine2: b2.endLine,
          code1: b1.code,
          code2: b2.code,
          similarity: sim,
          confidence,
          patternType: b1.patternType,
          tokenCost: b1.tokens + b2.tokens,
          severity,
          reason,
          suggestion,
          matchedRule
        };
        duplicates.push(dup);
        // Optional live output as findings are discovered.
        if (streamResults)
          console.log(
            `[DUPLICATE] ${dup.file1}:${dup.line1} <-> ${dup.file2}:${dup.line2} (${Math.round(sim * 100)}%, conf: ${Math.round(confidence * 100)}%)`
          );
      }
    }
  }
  if (onProgress) {
    onProgress(
      totalComparisons,
      totalComparisons,
      `Duplicate detection complete. Found ${duplicates.length} patterns.`
    );
  }
  // Most similar pairs first.
  return duplicates.sort((a, b) => b.similarity - a.similarity);
}
|
|
430
|
+
|
|
431
|
+
// src/grouping.ts
|
|
432
|
+
var import_core3 = require("@aiready/core");
|
|
433
|
+
var import_path = __toESM(require("path"));
|
|
434
|
+
/**
 * Collapse individual duplicate findings into one summary record per file
 * pair: occurrence count, total token cost, average similarity, the set of
 * pattern types involved, every line range, and the worst severity seen.
 *
 * @param {Array} duplicates - findings from detectDuplicatePatterns
 * @returns {Array} one group object per unique (order-independent) file pair
 */
function groupDuplicatesByFilePair(duplicates) {
  const byPair = new Map();
  for (const dup of duplicates) {
    // Sort the pair so (a, b) and (b, a) share one key.
    const key = [dup.file1, dup.file2].sort().join("::");
    let group = byPair.get(key);
    if (!group) {
      group = {
        filePair: key,
        severity: dup.severity,
        occurrences: 0,
        totalTokenCost: 0,
        averageSimilarity: 0,
        patternTypes: new Set(),
        lineRanges: []
      };
      byPair.set(key, group);
    }
    group.occurrences += 1;
    group.totalTokenCost += dup.tokenCost;
    // Running sum here; divided into a true average on the way out.
    group.averageSimilarity += dup.similarity;
    group.patternTypes.add(dup.patternType);
    group.lineRanges.push({
      file1: { start: dup.line1, end: dup.endLine1 },
      file2: { start: dup.line2, end: dup.endLine2 }
    });
    // Escalate the group to the most severe finding it contains.
    if ((0, import_core3.getSeverityLevel)(dup.severity) > (0, import_core3.getSeverityLevel)(group.severity)) {
      group.severity = dup.severity;
    }
  }
  return [...byPair.values()].map((group) => ({
    ...group,
    averageSimilarity: group.averageSimilarity / group.occurrences
  }));
}
|
|
469
|
+
// Group duplicate findings into refactor clusters: files are nodes, each
// duplicate pair is an undirected edge, and every connected component with
// at least two files becomes one cluster with aggregate stats.
function createRefactorClusters(duplicates) {
  // file -> set of files it shares a duplicate with
  const adjacency = /* @__PURE__ */ new Map();
  const visited = /* @__PURE__ */ new Set();
  const components = [];
  // Build the undirected duplication graph.
  for (const dup of duplicates) {
    if (!adjacency.has(dup.file1)) adjacency.set(dup.file1, /* @__PURE__ */ new Set());
    if (!adjacency.has(dup.file2)) adjacency.set(dup.file2, /* @__PURE__ */ new Set());
    adjacency.get(dup.file1).add(dup.file2);
    adjacency.get(dup.file2).add(dup.file1);
  }
  // BFS from each unvisited file to collect connected components.
  for (const file of adjacency.keys()) {
    if (visited.has(file)) continue;
    const component = [];
    const queue = [file];
    visited.add(file);
    while (queue.length > 0) {
      const curr = queue.shift();
      component.push(curr);
      for (const neighbor of adjacency.get(curr) || []) {
        if (!visited.has(neighbor)) {
          visited.add(neighbor);
          queue.push(neighbor);
        }
      }
    }
    components.push(component);
  }
  const clusters = [];
  for (const component of components) {
    // A cluster needs at least two files to be actionable.
    if (component.length < 2) continue;
    // All findings whose BOTH endpoints live inside this component.
    const componentDups = duplicates.filter(
      (d) => component.includes(d.file1) && component.includes(d.file2)
    );
    const totalTokenCost = componentDups.reduce(
      (sum, d) => sum + d.tokenCost,
      0
    );
    // Math.max(1, ...) guards the division when no pair matched the filter.
    const avgSimilarity = componentDups.reduce((sum, d) => sum + d.similarity, 0) / Math.max(1, componentDups.length);
    const name = determineClusterName(component);
    // Severity is estimated from the first two files only; block code is not
    // available at this aggregation level, and size is assumed substantial.
    const { severity, reason, suggestion } = calculateSeverity(
      component[0],
      component[1],
      "",
      // Code not available here
      avgSimilarity,
      30
      // Assume substantial if clustered
    );
    clusters.push({
      id: `cluster-${clusters.length}`,
      name,
      files: component,
      severity,
      duplicateCount: componentDups.length,
      totalTokenCost,
      averageSimilarity: avgSimilarity,
      reason,
      suggestion
    });
  }
  return clusters;
}
|
|
531
|
+
/**
 * Pick a human-friendly label for a cluster of related files.
 *
 * Known path fragments get curated names; otherwise the label is derived
 * from the first file's parent directory, with a generic fallback.
 *
 * @param {string[]} files - file paths belonging to the cluster
 * @returns {string} display name for the cluster
 */
function determineClusterName(files) {
  if (files.length === 0) return "Unknown Cluster";
  const anyPathContains = (fragment) => files.some((f) => f.includes(fragment));
  if (anyPathContains("blog")) return "Blog SEO Boilerplate";
  if (anyPathContains("buttons")) return "Button Component Variants";
  if (anyPathContains("cards")) return "Card Component Variants";
  if (anyPathContains("login.test")) return "E2E Test Patterns";
  // Fall back to the first file's parent directory name, capitalized.
  const parentDir = import_path.default.dirname(files[0]).split(import_path.default.sep).pop();
  if (parentDir && parentDir !== "." && parentDir !== "..") {
    return `${parentDir.charAt(0).toUpperCase() + parentDir.slice(1)} Domain Group`;
  }
  return "Shared Pattern Group";
}
|
|
545
|
+
/**
 * Keep only clusters worth acting on: enough wasted tokens AND enough files.
 *
 * @param {Array} clusters - clusters from createRefactorClusters
 * @param {number} [minTokenCost=1000] - minimum combined token cost
 * @param {number} [minFiles=3] - minimum number of files involved
 * @returns {Array} clusters meeting both thresholds
 */
function filterClustersByImpact(clusters, minTokenCost = 1e3, minFiles = 3) {
  const isImpactful = (cluster) =>
    cluster.totalTokenCost >= minTokenCost && cluster.files.length >= minFiles;
  return clusters.filter(isImpactful);
}
|
|
550
|
+
|
|
551
|
+
// src/analyzer.ts
|
|
552
|
+
/**
 * Build a refactoring suggestion message for a duplicate finding.
 *
 * The base advice depends on the inferred pattern type; an urgency suffix is
 * appended when similarity exceeds 0.9 (HIGH) or 0.95 (CRITICAL).
 *
 * @param {string} patternType - one of the keys below ("api-handler",
 *   "validator", "utility", "class-method", "component", "function", "unknown")
 * @param {number} similarity - similarity score in [0, 1]
 * @returns {string} suggestion text, possibly with an urgency suffix
 */
function getRefactoringSuggestion(patternType, similarity) {
  const baseMessages = {
    "api-handler": "Extract common middleware or create a base handler class",
    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
    utility: "Move to a shared utilities file and reuse across modules",
    "class-method": "Consider inheritance or composition to share behavior",
    component: "Extract shared logic into a custom hook or HOC",
    function: "Extract into a shared helper function",
    unknown: "Extract common logic into a reusable module"
  };
  let urgency = "";
  if (similarity > 0.95) {
    urgency = " (CRITICAL: Nearly identical code)";
  } else if (similarity > 0.9) {
    urgency = " (HIGH: Very similar, refactor soon)";
  }
  return baseMessages[patternType] + urgency;
}
|
|
565
|
+
// Compute analysis options scaled to repository size. Scans the directory to
// estimate the number of code blocks, derives thresholds from that estimate,
// then overlays any explicitly-set user options. When
// userOptions.useSmartDefaults === false, fixed defaults are returned without
// scanning.
async function getSmartDefaults(directory, userOptions) {
  if (userOptions.useSmartDefaults === false) {
    // Static fallback configuration; no repository scan is performed.
    return {
      rootDir: directory,
      minSimilarity: 0.6,
      minLines: 8,
      batchSize: 100,
      approx: true,
      minSharedTokens: 12,
      maxCandidatesPerBlock: 5,
      streamResults: false,
      severity: "all",
      includeTests: false
    };
  }
  const scanOptions = {
    rootDir: directory,
    include: userOptions.include || ["**/*.{ts,tsx,js,jsx,py,java}"],
    exclude: userOptions.exclude
  };
  const files = await (0, import_core4.scanFiles)(scanOptions);
  const fileCount = files.length;
  // Rough heuristic: assume ~5 extractable blocks per file.
  const estimatedBlocks = fileCount * 5;
  // Larger repos require longer blocks (6..20 lines, +2 per 1000 blocks).
  const minLines = Math.max(
    6,
    Math.min(20, 6 + Math.floor(estimatedBlocks / 1e3) * 2)
  );
  // Larger repos require higher similarity (0.5..0.85).
  const minSimilarity = Math.min(0.85, 0.5 + estimatedBlocks / 5e3 * 0.3);
  const batchSize = estimatedBlocks > 1e3 ? 200 : 100;
  // Very large repos only report high-severity findings by default.
  const severity = estimatedBlocks > 3e3 ? "high" : "all";
  // Candidate cap shrinks as the repo grows (bounded to 5..100).
  const maxCandidatesPerBlock = Math.max(
    5,
    Math.min(100, Math.floor(1e6 / estimatedBlocks))
  );
  const defaults = {
    rootDir: directory,
    minSimilarity,
    minLines,
    batchSize,
    approx: true,
    minSharedTokens: 10,
    maxCandidatesPerBlock,
    streamResults: false,
    severity,
    includeTests: false
  };
  // Explicit (non-undefined) user options override the computed defaults,
  // but only for keys that exist in `defaults`.
  const result = { ...defaults };
  for (const key of Object.keys(defaults)) {
    if (key in userOptions && userOptions[key] !== void 0) {
      result[key] = userOptions[key];
    }
  }
  return result;
}
|
|
619
|
+
/**
 * Print the resolved analysis configuration to stdout.
 * Honors `config.suppressToolConfig` so embedding tools can keep quiet.
 *
 * @param {object} config - resolved analyzer options
 * @param {number} estimatedBlocks - estimated code-block count for the repo
 * @returns {void}
 */
function logConfiguration(config, estimatedBlocks) {
  if (config.suppressToolConfig) return;
  // Unconditional lines, emitted in order.
  const fixedLines = [
    "\u{1F4CB} Configuration:",
    `  Repository size: ~${estimatedBlocks} code blocks`,
    `  Similarity threshold: ${config.minSimilarity}`,
    `  Minimum lines: ${config.minLines}`,
    `  Approximate mode: ${config.approx ? "enabled" : "disabled"}`,
    `  Max candidates per block: ${config.maxCandidatesPerBlock}`,
    `  Min shared tokens: ${config.minSharedTokens}`,
    `  Severity filter: ${config.severity}`,
    `  Include tests: ${config.includeTests}`
  ];
  for (const line of fixedLines) {
    console.log(line);
  }
  // Optional sections, only when configured.
  if (config.excludePatterns && config.excludePatterns.length > 0) {
    console.log(`  Exclude patterns: ${config.excludePatterns.length} active`);
  }
  if (config.confidenceThreshold && config.confidenceThreshold > 0) {
    console.log(`  Confidence threshold: ${config.confidenceThreshold}`);
  }
  if (config.ignoreWhitelist && config.ignoreWhitelist.length > 0) {
    console.log(
      `  Ignore whitelist: ${config.ignoreWhitelist.length} entries`
    );
  }
  console.log("");
}
|
|
643
|
+
/**
 * Scan the repository for duplicate code patterns and build per-file results.
 *
 * Merges smart defaults with caller options, reads file contents in bounded
 * batches, runs duplicate detection, converts each duplicate into a per-file
 * issue (with optional severity filtering), and optionally groups duplicates
 * by file pair and clusters them by refactoring impact.
 *
 * @param {Object} options - Analysis options; unknown keys are forwarded to
 *   scanFiles as scan options. `options.onProgress` (if set) is passed through
 *   to the detector.
 * @returns {Promise<{results: Array, duplicates: Array, files: Array,
 *   groups: (Array|undefined), clusters: (Array|undefined), config: Object}>}
 */
async function analyzePatterns(options) {
  const smartDefaults = await getSmartDefaults(options.rootDir || ".", options);
  // Caller-supplied options win over smart defaults.
  const finalOptions = { ...smartDefaults, ...options };
  const {
    minSimilarity = 0.4,
    minLines = 5,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    streamResults = false,
    severity = "all",
    groupByFilePair = true,
    createClusters = true,
    minClusterTokenCost = 1e3,
    minClusterFiles = 3,
    excludePatterns = [],
    confidenceThreshold = 0,
    ignoreWhitelist = [],
    ...scanOptions
  } = finalOptions;
  const files = await (0, import_core4.scanFiles)(scanOptions);
  // Rough sizing heuristic used only for the configuration banner.
  const estimatedBlocks = files.length * 3;
  logConfiguration(finalOptions, estimatedBlocks);
  const results = [];
  // Read contents in bounded batches so we never hold too many file handles.
  const READ_BATCH_SIZE = 50;
  const fileContents = [];
  for (let i = 0; i < files.length; i += READ_BATCH_SIZE) {
    const batch = files.slice(i, i + READ_BATCH_SIZE);
    const batchContents = await Promise.all(
      batch.map(async (file) => ({
        file,
        content: await (0, import_core4.readFileContent)(file)
      }))
    );
    fileContents.push(...batchContents);
  }
  const duplicates = await detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines,
    batchSize,
    approx,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults,
    excludePatterns,
    confidenceThreshold,
    ignoreWhitelist,
    onProgress: options.onProgress
  });
  // Index duplicates by file once (O(d)) instead of filtering the full
  // duplicate list for every file (O(files * d)).
  const duplicatesByFile = new Map();
  for (const dup of duplicates) {
    const list1 = duplicatesByFile.get(dup.file1);
    if (list1) list1.push(dup);
    else duplicatesByFile.set(dup.file1, [dup]);
    // Guard against double-counting intra-file duplicates (file1 === file2).
    if (dup.file2 !== dup.file1) {
      const list2 = duplicatesByFile.get(dup.file2);
      if (list2) list2.push(dup);
      else duplicatesByFile.set(dup.file2, [dup]);
    }
  }
  // severity option → set of Severity values allowed through the filter.
  // Loop-invariant, so build it once rather than per file.
  const severityMap = {
    critical: [import_core4.Severity.Critical],
    high: [import_core4.Severity.Critical, import_core4.Severity.Major],
    medium: [import_core4.Severity.Critical, import_core4.Severity.Major, import_core4.Severity.Minor]
  };
  const allowedSeverities = severity !== "all" ? severityMap[severity] || [import_core4.Severity.Critical, import_core4.Severity.Major, import_core4.Severity.Minor] : null;
  for (const file of files) {
    const fileDuplicates = duplicatesByFile.get(file) || [];
    const issues = fileDuplicates.map((dup) => {
      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
      // Similarity bands: >95% critical, >90% major, otherwise minor.
      const severity2 = dup.similarity > 0.95 ? import_core4.Severity.Critical : dup.similarity > 0.9 ? import_core4.Severity.Major : import_core4.Severity.Minor;
      return {
        type: import_core4.IssueType.DuplicatePattern,
        severity: severity2,
        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
        location: {
          file,
          line: dup.file1 === file ? dup.line1 : dup.line2
        },
        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
      };
    });
    const filteredIssues = allowedSeverities ? issues.filter((issue) => allowedSeverities.includes(issue.severity)) : issues;
    // Token cost is summed over ALL duplicates for the file, intentionally
    // ignoring the severity filter (the filter affects reported issues only).
    const totalTokenCost = fileDuplicates.reduce(
      (sum, dup) => sum + dup.tokenCost,
      0
    );
    results.push({
      fileName: file,
      issues: filteredIssues,
      metrics: {
        tokenCost: totalTokenCost,
        // Each duplicate costs 10% consistency, floored at 0.
        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
      }
    });
  }
  let groups;
  let clusters;
  if (groupByFilePair) {
    groups = groupDuplicatesByFilePair(duplicates);
  }
  if (createClusters) {
    const allClusters = createRefactorClusters(duplicates);
    clusters = filterClustersByImpact(
      allClusters,
      minClusterTokenCost,
      minClusterFiles
    );
  }
  return { results, duplicates, files, groups, clusters, config: finalOptions };
}
|
|
751
|
+
/**
 * Aggregate per-file analysis results into a repository-level summary.
 *
 * Pattern metadata (type, similarity, token cost, counterpart file) is
 * recovered by parsing the human-readable issue messages produced by
 * analyzePatterns, so the regexes here must stay in sync with that
 * message format.
 *
 * @param {Array<{issues: Array<{message: string, location: {file: string, line: number}}>,
 *   metrics: {tokenCost?: number}}>} results - Per-file analysis results.
 * @returns {{totalPatterns: number, totalTokenCost: number,
 *   patternsByType: Object<string, number>, topDuplicates: Array}}
 */
function generateSummary(results) {
  const allIssues = results.flatMap((r) => r.issues);
  const totalTokenCost = results.reduce(
    (sum, r) => sum + (r.metrics.tokenCost || 0),
    0
  );
  // Known pattern types are pre-seeded so they always appear in the output,
  // even with a zero count; unknown types are added on the fly.
  const patternsByType = {
    "api-handler": 0,
    validator: 0,
    utility: 0,
    "class-method": 0,
    component: 0,
    function: 0,
    unknown: 0
  };
  for (const issue of allIssues) {
    // Message format: "<pattern-type> pattern NN% similar to <file> (NN tokens wasted)"
    const match = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    if (match) {
      const type = match[1];
      patternsByType[type] = (patternsByType[type] || 0) + 1;
    }
  }
  const topDuplicates = allIssues.slice(0, 10).map((issue) => {
    const similarityMatch = issue.message.match(/(\d+)% similar/);
    const tokenMatch = issue.message.match(/\((\d+) tokens/);
    const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    const fileMatch = issue.message.match(/similar to (.+?) \(/);
    // Parse "NN%" once; always pass the radix to parseInt.
    const similarity = similarityMatch ? Number.parseInt(similarityMatch[1], 10) / 100 : 0;
    return {
      files: [
        {
          path: issue.location.file,
          startLine: issue.location.line,
          endLine: 0
        },
        {
          // The counterpart's line numbers are not recoverable from the message.
          path: fileMatch?.[1] || "unknown",
          startLine: 0,
          endLine: 0
        }
      ],
      similarity,
      // Fallback for summary: exact confidence is not encoded in the message,
      // so reuse the parsed similarity.
      confidence: similarity,
      patternType: typeMatch?.[1] || "unknown",
      tokenCost: tokenMatch ? Number.parseInt(tokenMatch[1], 10) : 0
    };
  });
  return {
    totalPatterns: allIssues.length,
    totalTokenCost,
    patternsByType,
    topDuplicates
  };
}
|
|
805
|
+
// Annotate the CommonJS export names for ESM import in node:
// (`0 &&` keeps this assignment dead at runtime; the names remain visible
// to static analysis of the module's named exports.)
0 && (module.exports = {
  analyzePatterns,
  generateSummary,
  getSmartDefaults
});