@aiready/pattern-detect 0.9.4 → 0.9.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,669 @@
1
+ // src/index.ts
2
+ import { readFileContent } from "@aiready/core";
3
+
4
+ // src/detector.ts
5
+ import { estimateTokens } from "@aiready/core";
6
+
7
+ // src/context-rules.ts
8
+ var CONTEXT_RULES = [
9
+ // Test Fixtures - Intentional duplication for test isolation
10
+ {
11
+ name: "test-fixtures",
12
+ detect: (file, code) => {
13
+ const isTestFile = file.includes(".test.") || file.includes(".spec.") || file.includes("__tests__") || file.includes("/test/") || file.includes("/tests/");
14
+ const hasTestFixtures = code.includes("beforeAll") || code.includes("afterAll") || code.includes("beforeEach") || code.includes("afterEach") || code.includes("setUp") || code.includes("tearDown");
15
+ return isTestFile && hasTestFixtures;
16
+ },
17
+ severity: "info",
18
+ reason: "Test fixture duplication is intentional for test isolation",
19
+ suggestion: "Consider if shared test setup would improve maintainability without coupling tests"
20
+ },
21
+ // Email/Document Templates - Often intentionally similar for consistency
22
+ {
23
+ name: "templates",
24
+ detect: (file, code) => {
25
+ const isTemplate = file.includes("/templates/") || file.includes("-template") || file.includes("/email-templates/") || file.includes("/emails/");
26
+ const hasTemplateContent = (code.includes("return") || code.includes("export")) && (code.includes("html") || code.includes("subject") || code.includes("body"));
27
+ return isTemplate && hasTemplateContent;
28
+ },
29
+ severity: "minor",
30
+ reason: "Template duplication may be intentional for maintainability and branding consistency",
31
+ suggestion: "Extract shared structure only if templates become hard to maintain"
32
+ },
33
+ // E2E/Integration Test Page Objects - Test independence
34
+ {
35
+ name: "e2e-page-objects",
36
+ detect: (file, code) => {
37
+ const isE2ETest = file.includes("e2e/") || file.includes("/e2e/") || file.includes(".e2e.") || file.includes("/playwright/") || file.includes("playwright/") || file.includes("/cypress/") || file.includes("cypress/") || file.includes("/integration/") || file.includes("integration/");
38
+ const hasPageObjectPatterns = code.includes("page.") || code.includes("await page") || code.includes("locator") || code.includes("getBy") || code.includes("selector") || code.includes("click(") || code.includes("fill(");
39
+ return isE2ETest && hasPageObjectPatterns;
40
+ },
41
+ severity: "minor",
42
+ reason: "E2E test duplication ensures test independence and reduces coupling",
43
+ suggestion: "Consider page object pattern only if duplication causes maintenance issues"
44
+ },
45
+ // Configuration Files - Often necessarily similar by design
46
+ {
47
+ name: "config-files",
48
+ detect: (file) => {
49
+ return file.endsWith(".config.ts") || file.endsWith(".config.js") || file.includes("jest.config") || file.includes("vite.config") || file.includes("webpack.config") || file.includes("rollup.config") || file.includes("tsconfig");
50
+ },
51
+ severity: "minor",
52
+ reason: "Configuration files often have similar structure by design",
53
+ suggestion: "Consider shared config base only if configurations become hard to maintain"
54
+ },
55
+ // Type Definitions - Duplication for type safety and module independence
56
+ {
57
+ name: "type-definitions",
58
+ detect: (file, code) => {
59
+ const isTypeFile = file.endsWith(".d.ts") || file.includes("/types/");
60
+ const hasTypeDefinitions = code.includes("interface ") || code.includes("type ") || code.includes("enum ");
61
+ return isTypeFile && hasTypeDefinitions;
62
+ },
63
+ severity: "info",
64
+ reason: "Type duplication may be intentional for module independence and type safety",
65
+ suggestion: "Extract to shared types package only if causing maintenance burden"
66
+ },
67
+ // Migration Scripts - One-off scripts that are similar by nature
68
+ {
69
+ name: "migration-scripts",
70
+ detect: (file) => {
71
+ return file.includes("/migrations/") || file.includes("/migrate/") || file.includes(".migration.");
72
+ },
73
+ severity: "info",
74
+ reason: "Migration scripts are typically one-off and intentionally similar",
75
+ suggestion: "Duplication is acceptable for migration scripts"
76
+ },
77
+ // Mock Data - Test data intentionally duplicated
78
+ {
79
+ name: "mock-data",
80
+ detect: (file, code) => {
81
+ const isMockFile = file.includes("/mocks/") || file.includes("/__mocks__/") || file.includes("/fixtures/") || file.includes(".mock.") || file.includes(".fixture.");
82
+ const hasMockData = code.includes("mock") || code.includes("Mock") || code.includes("fixture") || code.includes("stub") || code.includes("export const");
83
+ return isMockFile && hasMockData;
84
+ },
85
+ severity: "info",
86
+ reason: "Mock data duplication is expected for comprehensive test coverage",
87
+ suggestion: "Consider shared factories only for complex mock generation"
88
+ }
89
+ ];
90
// Decide how serious a detected duplication is.
//
// Context rules (tests, templates, configs, ...) take precedence over the
// generic thresholds: the first rule whose detector matches either file's
// path (checked against the first block's code) supplies the verdict.
// Otherwise the pair is graded purely on similarity and block size.
function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
  const matched = CONTEXT_RULES.find(
    (rule) => rule.detect(file1, code) || rule.detect(file2, code)
  );
  if (matched) {
    return {
      severity: matched.severity,
      reason: matched.reason,
      suggestion: matched.suggestion,
      matchedRule: matched.name
    };
  }
  // Generic fallback ladder (no matchedRule field for these).
  if (similarity >= 0.95 && linesOfCode >= 30) {
    return {
      severity: "critical",
      reason: "Large nearly-identical code blocks waste tokens and create maintenance burden",
      suggestion: "Extract to shared utility module immediately"
    };
  }
  if (similarity >= 0.95 && linesOfCode >= 15) {
    return {
      severity: "major",
      reason: "Nearly identical code should be consolidated",
      suggestion: "Move to shared utility file"
    };
  }
  if (similarity >= 0.85) {
    return {
      severity: "major",
      reason: "High similarity indicates significant duplication",
      suggestion: "Extract common logic to shared function"
    };
  }
  if (similarity >= 0.7) {
    return {
      severity: "minor",
      reason: "Moderate similarity detected",
      suggestion: "Consider extracting shared patterns if code evolves together"
    };
  }
  return {
    severity: "minor",
    reason: "Minor similarity detected",
    suggestion: "Monitor but refactoring may not be worthwhile"
  };
}
133
// Map a severity id to its emoji-tagged display label.
// Unknown severities yield undefined, matching a failed lookup.
function getSeverityLabel(severity) {
  switch (severity) {
    case "critical":
      return "\u{1F534} CRITICAL";
    case "major":
      return "\u{1F7E1} MAJOR";
    case "minor":
      return "\u{1F535} MINOR";
    case "info":
      return "\u2139\uFE0F INFO";
  }
}
142
+ function filterBySeverity(duplicates, minSeverity) {
143
+ const severityOrder = ["info", "minor", "major", "critical"];
144
+ const minIndex = severityOrder.indexOf(minSeverity);
145
+ if (minIndex === -1) return duplicates;
146
+ return duplicates.filter((dup) => {
147
+ const dupIndex = severityOrder.indexOf(dup.severity);
148
+ return dupIndex >= minIndex;
149
+ });
150
+ }
151
+
152
+ // src/detector.ts
153
// src/detector.ts
//
// Classify a code block via crude keyword heuristics. Checks run in
// priority order, so e.g. an Express-looking handler wins over a generic
// function. Matching is case-insensitive substring search.
function categorizePattern(code) {
  const lower = code.toLowerCase();
  const has = (needle) => lower.includes(needle);
  const looksLikeApiHandler =
    (has("request") && has("response")) ||
    ["router.", "app.get", "app.post", "express", "ctx.body"].some(has);
  if (looksLikeApiHandler) return "api-handler";
  const looksLikeValidator =
    ["validate", "schema", "zod", "yup"].some(has) ||
    (has("if") && has("throw"));
  if (looksLikeValidator) return "validator";
  if (["return (", "jsx", "component", "props"].some(has)) return "component";
  if (has("class ") || has("this.")) return "class-method";
  // Pure-looking code: returns something without touching `this` or `new`.
  if (has("return ") && !has("this") && !has("new ")) return "utility";
  if (has("function") || has("=>")) return "function";
  return "unknown";
}
175
// Scan file content for function-like blocks of at least `minLines` lines.
//
// Heuristic parser: a block starts on a line that looks like a function
// declaration or arrow/const assignment, and ends when the running brace
// counter returns to zero. Braces are counted character-by-character, so
// braces inside strings or comments will skew the depth (accepted
// trade-off of the heuristic). Blocks shorter than minLines are dropped.
function extractCodeBlocks(content, minLines) {
  const sourceLines = content.split("\n");
  const blocks = [];
  let pending = [];       // lines of the block currently being collected
  let startIndex = 0;     // 0-based index where the current block began
  let depth = 0;          // running { } nesting depth across the file
  let collecting = false; // are we inside a candidate block?

  const looksLikeFunctionStart = (trimmed) =>
    trimmed.includes("function ") ||
    trimmed.includes("=>") ||
    trimmed.includes("async ") ||
    /^(export\s+)?(async\s+)?function\s+/.test(trimmed) ||
    /^(export\s+)?const\s+\w+\s*=\s*(async\s*)?\(/.test(trimmed);

  for (let i = 0; i < sourceLines.length; i++) {
    const line = sourceLines[i];
    // Start detection happens before this line's braces are counted, so
    // an opening brace on the declaration line belongs to the block.
    if (!collecting && looksLikeFunctionStart(line.trim())) {
      collecting = true;
      startIndex = i;
    }
    for (const ch of line) {
      if (ch === "{") depth++;
      if (ch === "}") depth--;
    }
    if (collecting) {
      pending.push(line);
    }
    if (collecting && depth === 0) {
      // Block closed on this line; keep it only if it is long enough.
      if (pending.length >= minLines) {
        const blockContent = pending.join("\n");
        const linesOfCode = pending.filter(
          (l) => l.trim() && !l.trim().startsWith("//")
        ).length;
        blocks.push({
          content: blockContent,
          startLine: startIndex + 1,
          endLine: i + 1,
          patternType: categorizePattern(blockContent),
          linesOfCode
        });
      }
      pending = [];
      collecting = false;
    }
  }
  return blocks;
}
217
// Canonicalize code for similarity comparison: strip comments, collapse
// string and numeric literals to placeholders, and squeeze whitespace.
function normalizeCode(code) {
  let normalized = code;
  normalized = normalized.replace(/\/\/.*$/gm, "");          // line comments
  normalized = normalized.replace(/\/\*[\s\S]*?\*\//g, "");  // block comments
  normalized = normalized.replace(/"[^"]*"/g, '"STR"');      // "..." -> "STR"
  normalized = normalized.replace(/'[^']*'/g, "'STR'");      // '...' -> 'STR'
  normalized = normalized.replace(/`[^`]*`/g, "`STR`");      // `...` -> `STR`
  normalized = normalized.replace(/\b\d+\b/g, "NUM");        // numbers -> NUM
  normalized = normalized.replace(/\s+/g, " ");              // squeeze whitespace
  return normalized.trim();
}
220
// Jaccard similarity of two token lists: |A ∩ B| / |A ∪ B| computed over
// the distinct tokens. Two empty inputs are defined as 0, not 1.
function jaccardSimilarity(tokens1, tokens2) {
  const left = new Set(tokens1);
  const right = new Set(tokens2);
  const shared = [...left].filter((token) => right.has(token)).length;
  const unionSize = left.size + right.size - shared;
  if (unionSize === 0) return 0;
  return shared / unionSize;
}
230
// Compare extracted code blocks pairwise and report near-duplicates.
//
// Two modes:
//  - approx (default): an inverted token index proposes a few candidate
//    partners per block, keeping cost near-linear in block count;
//  - exact (--no-approx): all O(B^2) pairs, capped at 500k comparisons.
//
// `files` is an array of { file, content }. Returns duplicate records
// sorted by similarity (desc), then combined token cost (desc).
//
// Fixes vs previous version: removed an unreachable guard inside the
// approx-only branch (it tested `!approx` and could never fire), and
// consolidated the duplicate-record construction that was copy-pasted
// in both comparison branches into a single closure.
async function detectDuplicatePatterns(files, options) {
  const {
    minSimilarity,
    minLines,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    streamResults = false
  } = options;
  const duplicates = [];
  // Safety budget applies only to exhaustive mode; approx is unbounded.
  const maxComparisons = approx ? Infinity : 5e5;
  const allBlocks = files.flatMap(
    (file) => extractCodeBlocks(file.content, minLines).map((block) => ({
      content: block.content,
      startLine: block.startLine,
      endLine: block.endLine,
      file: file.file,
      normalized: normalizeCode(block.content),
      patternType: block.patternType,
      tokenCost: estimateTokens(block.content),
      linesOfCode: block.linesOfCode
    }))
  );
  console.log(`Extracted ${allBlocks.length} code blocks for analysis`);
  if (!approx && allBlocks.length > 500) {
    console.log(`\u26A0\uFE0F Using --no-approx mode with ${allBlocks.length} blocks may be slow (O(B\xB2) complexity).`);
    console.log(`   Consider using approximate mode (default) for better performance.`);
  }
  // Common keywords carry no similarity signal; drop them before indexing.
  const stopwords = new Set([
    "return", "const", "let", "var", "function", "class", "new", "if",
    "else", "for", "while", "async", "await", "try", "catch", "switch",
    "case", "default", "import", "export", "from", "true", "false",
    "null", "undefined", "this"
  ]);
  const tokenize = (norm) => norm.split(/[\s(){}\[\];,\.]+/).filter((t) => t && t.length >= 3 && !stopwords.has(t.toLowerCase()));
  const blockTokens = allBlocks.map((b) => tokenize(b.normalized));
  // token -> indices of blocks containing it (built in approx mode only).
  const invertedIndex = new Map();
  if (approx) {
    for (let i = 0; i < blockTokens.length; i++) {
      for (const tok of blockTokens[i]) {
        let arr = invertedIndex.get(tok);
        if (!arr) {
          arr = [];
          invertedIndex.set(tok, arr);
        }
        arr.push(i);
      }
    }
  }
  const totalComparisons = approx ? void 0 : allBlocks.length * (allBlocks.length - 1) / 2;
  if (totalComparisons !== void 0) {
    console.log(`Processing ${totalComparisons.toLocaleString()} comparisons in batches...`);
  } else {
    console.log(`Using approximate candidate selection to reduce comparisons...`);
  }
  let comparisonsProcessed = 0;
  let comparisonsBudgetExhausted = false;
  const startTime = Date.now();
  // Shared by both branches: score blocks i/j and record a duplicate when
  // their token similarity clears the threshold.
  const compareAndRecord = (i, j) => {
    const block1 = allBlocks[i];
    const block2 = allBlocks[j];
    const similarity = jaccardSimilarity(blockTokens[i], blockTokens[j]);
    if (similarity < minSimilarity) return;
    const { severity, reason, suggestion, matchedRule } = calculateSeverity(
      block1.file,
      block2.file,
      block1.content,
      similarity,
      block1.linesOfCode
    );
    const duplicate = {
      file1: block1.file,
      file2: block2.file,
      line1: block1.startLine,
      line2: block2.startLine,
      endLine1: block1.endLine,
      endLine2: block2.endLine,
      similarity,
      snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
      patternType: block1.patternType,
      tokenCost: block1.tokenCost + block2.tokenCost,
      linesOfCode: block1.linesOfCode,
      severity,
      reason,
      suggestion,
      matchedRule
    };
    duplicates.push(duplicate);
    if (streamResults) {
      console.log(`\n\u2705 Found: ${duplicate.patternType} ${Math.round(similarity * 100)}% similar`);
      console.log(`   ${duplicate.file1}:${duplicate.line1}-${duplicate.endLine1} \u21D4 ${duplicate.file2}:${duplicate.line2}-${duplicate.endLine2}`);
      console.log(`   Token cost: ${duplicate.tokenCost.toLocaleString()}`);
    }
  };
  for (let i = 0; i < allBlocks.length; i++) {
    if (comparisonsProcessed >= maxComparisons) {
      comparisonsBudgetExhausted = true;
      break;
    }
    if (i % batchSize === 0 && i > 0) {
      // Periodic progress report; yield to the event loop between batches.
      const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
      const duplicatesFound = duplicates.length;
      if (totalComparisons !== void 0) {
        const progress = (comparisonsProcessed / totalComparisons * 100).toFixed(1);
        const remaining = totalComparisons - comparisonsProcessed;
        const rate = comparisonsProcessed / parseFloat(elapsed);
        const eta = remaining > 0 ? (remaining / rate).toFixed(0) : 0;
        console.log(`   ${progress}% (${comparisonsProcessed.toLocaleString()}/${totalComparisons.toLocaleString()} comparisons, ${elapsed}s elapsed, ~${eta}s remaining, ${duplicatesFound} duplicates)`);
      } else {
        console.log(`   Processed ${i.toLocaleString()}/${allBlocks.length} blocks (${elapsed}s elapsed, ${duplicatesFound} duplicates)`);
      }
      await new Promise((resolve) => setImmediate(resolve));
    }
    const block1 = allBlocks[i];
    if (approx) {
      // Candidate selection: count shared *rare* tokens (appearing in
      // fewer than 10% of blocks) via the inverted index, then keep only
      // pairs sharing enough tokens both absolutely and relatively.
      const counts = new Map();
      const block1Size = new Set(blockTokens[i]).size;
      const rareTokens = blockTokens[i].filter((tok) => {
        const blocksWithToken = invertedIndex.get(tok)?.length || 0;
        return blocksWithToken < allBlocks.length * 0.1;
      });
      for (const tok of rareTokens) {
        const ids = invertedIndex.get(tok);
        if (!ids) continue;
        for (const j of ids) {
          if (j <= i) continue; // count each unordered pair once
          if (allBlocks[j].file === block1.file) continue; // skip same file
          counts.set(j, (counts.get(j) || 0) + 1);
        }
      }
      // NOTE(review): the candidate list is hard-capped at 5 per block
      // even when maxCandidatesPerBlock is larger — confirm intended.
      const candidates = Array.from(counts.entries()).filter(([j, shared]) => {
        const minSize = Math.min(block1Size, blockTokens[j].length);
        return shared >= minSharedTokens && shared / minSize >= 0.3;
      }).sort((a, b) => b[1] - a[1]).slice(0, Math.min(maxCandidatesPerBlock, 5)).map(([j]) => j);
      for (const j of candidates) {
        comparisonsProcessed++;
        compareAndRecord(i, j);
      }
    } else {
      for (let j = i + 1; j < allBlocks.length; j++) {
        if (comparisonsProcessed >= maxComparisons) break;
        comparisonsProcessed++;
        if (block1.file === allBlocks[j].file) continue;
        compareAndRecord(i, j);
      }
    }
  }
  if (comparisonsBudgetExhausted) {
    console.log(`\u26A0\uFE0F Comparison budget exhausted (${maxComparisons.toLocaleString()} comparisons). Use --max-comparisons to increase.`);
  }
  // Most similar (then most token-expensive) duplicates first.
  return duplicates.sort(
    (a, b) => b.similarity - a.similarity || b.tokenCost - a.tokenCost
  );
}
451
+
452
+ // src/index.ts
453
// src/index.ts
//
// Human-readable refactoring advice for a duplicate pair, keyed by its
// pattern type, with an urgency suffix appended at high similarity.
function getRefactoringSuggestion(patternType, similarity) {
  const baseMessages = {
    "api-handler": "Extract common middleware or create a base handler class",
    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
    utility: "Move to a shared utilities file and reuse across modules",
    "class-method": "Consider inheritance or composition to share behavior",
    component: "Extract shared logic into a custom hook or HOC",
    function: "Extract into a shared helper function",
    unknown: "Extract common logic into a reusable module"
  };
  let urgency = "";
  if (similarity > 0.95) {
    urgency = " (CRITICAL: Nearly identical code)";
  } else if (similarity > 0.9) {
    urgency = " (HIGH: Very similar, refactor soon)";
  }
  return baseMessages[patternType] + urgency;
}
466
// Derive analysis options scaled to repository size.
//
// When `userOptions.useSmartDefaults === false`, a fixed conservative
// preset is returned without touching the filesystem. Otherwise the
// directory is scanned (via @aiready/core's scanFiles) to estimate the
// block count (~3 per file), and thresholds are scaled so larger repos
// get stricter, cheaper settings. Explicitly-set user options always
// override the computed defaults.
//
// Fixes vs previous version: removed the unused `value` binding from the
// override loop (iterate keys instead of entries) and made `defaults`
// a const since it is never reassigned. Behavior is unchanged.
async function getSmartDefaults(directory, userOptions) {
  if (userOptions.useSmartDefaults === false) {
    return {
      rootDir: directory,
      minSimilarity: 0.6,
      minLines: 8,
      batchSize: 100,
      approx: true,
      minSharedTokens: 12,
      maxCandidatesPerBlock: 5,
      streamResults: false,
      severity: "all",
      includeTests: false
    };
  }
  const scanOptions = {
    rootDir: directory,
    include: userOptions.include || ["**/*.{ts,tsx,js,jsx,py,java}"],
    exclude: userOptions.exclude
  };
  const { scanFiles: scanFiles2 } = await import("@aiready/core");
  const files = await scanFiles2(scanOptions);
  // Rough size estimate: ~3 extractable blocks per file.
  const estimatedBlocks = files.length * 3;
  // Larger repos: fewer candidates per block, higher similarity bar,
  // longer minimum blocks, more shared tokens, bigger batches, and a
  // stricter default severity filter.
  const maxCandidatesPerBlock = Math.max(3, Math.min(10, Math.floor(3e4 / estimatedBlocks)));
  const minSimilarity = Math.min(0.75, 0.5 + estimatedBlocks / 1e4 * 0.25);
  const minLines = Math.max(6, Math.min(12, 6 + Math.floor(estimatedBlocks / 2e3)));
  const minSharedTokens = Math.max(10, Math.min(20, 10 + Math.floor(estimatedBlocks / 2e3)));
  const batchSize = estimatedBlocks > 1e3 ? 200 : 100;
  const severity = estimatedBlocks > 5e3 ? "high" : "all";
  const defaults = {
    rootDir: directory,
    minSimilarity,
    minLines,
    batchSize,
    approx: true,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults: false,
    severity,
    includeTests: false
  };
  // Overlay user options that were explicitly provided (defined values
  // only) onto the computed defaults.
  const result = { ...defaults };
  for (const key of Object.keys(defaults)) {
    if (key in userOptions && userOptions[key] !== void 0) {
      result[key] = userOptions[key];
    }
  }
  return result;
}
515
+ function logConfiguration(config, estimatedBlocks) {
516
+ console.log("\u{1F4CB} Configuration:");
517
+ console.log(` Repository size: ~${estimatedBlocks} code blocks`);
518
+ console.log(` Similarity threshold: ${config.minSimilarity}`);
519
+ console.log(` Minimum lines: ${config.minLines}`);
520
+ console.log(` Approximate mode: ${config.approx ? "enabled" : "disabled"}`);
521
+ console.log(` Max candidates per block: ${config.maxCandidatesPerBlock}`);
522
+ console.log(` Min shared tokens: ${config.minSharedTokens}`);
523
+ console.log(` Severity filter: ${config.severity}`);
524
+ console.log(` Include tests: ${config.includeTests}`);
525
+ console.log("");
526
+ }
527
// Top-level analysis pipeline: resolve options, scan and read the files,
// detect duplicate blocks, then fold the duplicates into per-file results.
// Returns { results, duplicates, files }.
async function analyzePatterns(options) {
  // NOTE(review): getSmartDefaults already scans the directory to size the
  // repo, and scanFiles below scans it again — confirm the double scan is
  // intended (it doubles filesystem traversal per run).
  const smartDefaults = await getSmartDefaults(options.rootDir || ".", options);
  // User options override smart defaults. Keys explicitly present but set
  // to undefined shadow the smart default and fall through to the
  // destructuring defaults below.
  const finalOptions = { ...smartDefaults, ...options };
  const {
    minSimilarity = 0.4,
    minLines = 5,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    streamResults = false,
    severity = "all",
    includeTests = false,
    ...scanOptions
  } = finalOptions;
  const { scanFiles: scanFiles2 } = await import("@aiready/core");
  const files = await scanFiles2(scanOptions);
  // Same "~3 blocks per file" estimate used by getSmartDefaults.
  const estimatedBlocks = files.length * 3;
  logConfiguration(finalOptions, estimatedBlocks);
  const results = [];
  // Read all file contents up front (in parallel) for the detector.
  const fileContents = await Promise.all(
    files.map(async (file) => ({
      file,
      content: await readFileContent(file)
    }))
  );
  const duplicates = await detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines,
    batchSize,
    approx,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults
  });
  for (const file of files) {
    // Duplicates touching this file on either side of the pair.
    const fileDuplicates = duplicates.filter(
      (dup) => dup.file1 === file || dup.file2 === file
    );
    const issues = fileDuplicates.map((dup) => {
      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
      // NOTE(review): issue severity is recomputed from similarity alone,
      // discarding the context-aware dup.severity assigned by
      // calculateSeverity — confirm this divergence is intentional.
      const severity2 = dup.similarity > 0.95 ? "critical" : dup.similarity > 0.9 ? "major" : "minor";
      return {
        type: "duplicate-pattern",
        severity: severity2,
        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
        location: {
          file,
          // Report the line of whichever side of the pair is this file.
          line: dup.file1 === file ? dup.line1 : dup.line2
        },
        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
      };
    });
    // Apply the user's severity filter; "all" keeps everything, and an
    // unrecognized filter value falls back to critical/major/minor.
    let filteredIssues = issues;
    if (severity !== "all") {
      const severityMap = {
        critical: ["critical"],
        high: ["critical", "major"],
        medium: ["critical", "major", "minor"]
      };
      const allowedSeverities = severityMap[severity] || ["critical", "major", "minor"];
      filteredIssues = issues.filter((issue) => allowedSeverities.includes(issue.severity));
    }
    // dup.tokenCost covers both blocks of a pair, and a pair is counted
    // under both of its files, so per-file totals overlap.
    const totalTokenCost = fileDuplicates.reduce(
      (sum, dup) => sum + dup.tokenCost,
      0
    );
    results.push({
      fileName: file,
      issues: filteredIssues,
      metrics: {
        tokenCost: totalTokenCost,
        // Score degrades by 0.1 per duplicate pair, floored at 0.
        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
      }
    });
  }
  return { results, duplicates, files };
}
605
+ function generateSummary(results) {
606
+ const allIssues = results.flatMap((r) => r.issues);
607
+ const totalTokenCost = results.reduce(
608
+ (sum, r) => sum + (r.metrics.tokenCost || 0),
609
+ 0
610
+ );
611
+ const patternsByType = {
612
+ "api-handler": 0,
613
+ validator: 0,
614
+ utility: 0,
615
+ "class-method": 0,
616
+ component: 0,
617
+ function: 0,
618
+ unknown: 0
619
+ };
620
+ allIssues.forEach((issue) => {
621
+ const match = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
622
+ if (match) {
623
+ const type = match[1];
624
+ patternsByType[type] = (patternsByType[type] || 0) + 1;
625
+ }
626
+ });
627
+ const topDuplicates = allIssues.slice(0, 10).map((issue) => {
628
+ const similarityMatch = issue.message.match(/(\d+)% similar/);
629
+ const tokenMatch = issue.message.match(/\((\d+) tokens/);
630
+ const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
631
+ const fileMatch = issue.message.match(/similar to (.+?) \(/);
632
+ return {
633
+ files: [
634
+ {
635
+ path: issue.location.file,
636
+ startLine: issue.location.line,
637
+ endLine: 0
638
+ // Not available from Issue
639
+ },
640
+ {
641
+ path: fileMatch?.[1] || "unknown",
642
+ startLine: 0,
643
+ // Not available from Issue
644
+ endLine: 0
645
+ // Not available from Issue
646
+ }
647
+ ],
648
+ similarity: similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0,
649
+ patternType: typeMatch?.[1] || "unknown",
650
+ tokenCost: tokenMatch ? parseInt(tokenMatch[1]) : 0
651
+ };
652
+ });
653
+ return {
654
+ totalPatterns: allIssues.length,
655
+ totalTokenCost,
656
+ patternsByType,
657
+ topDuplicates
658
+ };
659
+ }
660
+
661
// Public API of the bundled package.
export {
  calculateSeverity,
  getSeverityLabel,
  filterBySeverity,
  detectDuplicatePatterns,
  getSmartDefaults,
  analyzePatterns,
  generateSummary
};