@aiready/pattern-detect 0.12.0 → 0.12.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,754 @@
1
+ // src/index.ts
2
+ import { readFileContent, Severity as Severity4, IssueType } from "@aiready/core";
3
+
4
+ // src/detector.ts
5
+ import { estimateTokens } from "@aiready/core";
6
+
7
+ // src/context-rules.ts
8
+ import { Severity } from "@aiready/core";
9
// Context rules: known situations where code duplication is deliberate or
// low-risk. Each rule inspects a file path and its code and, when matched,
// overrides the similarity-based severity with a gentler one.
var CONTEXT_RULES = [
  // Test fixtures - intentional duplication for test isolation.
  {
    name: "test-fixtures",
    detect: (file, code) => {
      const testPathHints = [".test.", ".spec.", "__tests__", "/test/", "/tests/"];
      const fixtureHooks = ["beforeAll", "afterAll", "beforeEach", "afterEach", "setUp", "tearDown"];
      return testPathHints.some((hint) => file.includes(hint)) && fixtureHooks.some((hook) => code.includes(hook));
    },
    severity: Severity.Info,
    reason: "Test fixture duplication is intentional for test isolation",
    suggestion: "Consider if shared test setup would improve maintainability without coupling tests"
  },
  // Email/document templates - often intentionally similar for consistency.
  {
    name: "templates",
    detect: (file, code) => {
      const templatePathHints = ["/templates/", "-template", "/email-templates/", "/emails/"];
      const emitsOutput = ["return", "export"].some((kw) => code.includes(kw));
      const looksLikeTemplate = ["html", "subject", "body"].some((field) => code.includes(field));
      return templatePathHints.some((hint) => file.includes(hint)) && emitsOutput && looksLikeTemplate;
    },
    severity: Severity.Minor,
    reason: "Template duplication may be intentional for maintainability and branding consistency",
    suggestion: "Extract shared structure only if templates become hard to maintain"
  },
  // E2E/integration test page objects - duplication keeps tests independent.
  {
    name: "e2e-page-objects",
    detect: (file, code) => {
      const e2ePathHints = ["e2e/", "/e2e/", ".e2e.", "/playwright/", "playwright/", "/cypress/", "cypress/", "/integration/", "integration/"];
      const pageObjectHints = ["page.", "await page", "locator", "getBy", "selector", "click(", "fill("];
      return e2ePathHints.some((hint) => file.includes(hint)) && pageObjectHints.some((hint) => code.includes(hint));
    },
    severity: Severity.Minor,
    reason: "E2E test duplication ensures test independence and reduces coupling",
    suggestion: "Consider page object pattern only if duplication causes maintenance issues"
  },
  // Configuration files - often necessarily similar by design.
  {
    name: "config-files",
    detect: (file) => {
      const configSuffixes = [".config.ts", ".config.js"];
      const configNameHints = ["jest.config", "vite.config", "webpack.config", "rollup.config", "tsconfig"];
      return configSuffixes.some((suffix) => file.endsWith(suffix)) || configNameHints.some((hint) => file.includes(hint));
    },
    severity: Severity.Minor,
    reason: "Configuration files often have similar structure by design",
    suggestion: "Consider shared config base only if configurations become hard to maintain"
  },
  // Type definitions - duplication for type safety and module independence.
  {
    name: "type-definitions",
    detect: (file, code) => {
      const isTypeFile = file.endsWith(".d.ts") || file.includes("/types/");
      // Trailing spaces are deliberate: match the keyword, not substrings.
      const declaresTypes = ["interface ", "type ", "enum "].some((kw) => code.includes(kw));
      return isTypeFile && declaresTypes;
    },
    severity: Severity.Info,
    reason: "Type duplication may be intentional for module independence and type safety",
    suggestion: "Extract to shared types package only if causing maintenance burden"
  },
  // Migration scripts - one-off scripts that are similar by nature.
  {
    name: "migration-scripts",
    detect: (file) => {
      return ["/migrations/", "/migrate/", ".migration."].some((hint) => file.includes(hint));
    },
    severity: Severity.Info,
    reason: "Migration scripts are typically one-off and intentionally similar",
    suggestion: "Duplication is acceptable for migration scripts"
  },
  // Mock data - test data intentionally duplicated.
  {
    name: "mock-data",
    detect: (file, code) => {
      const mockPathHints = ["/mocks/", "/__mocks__/", "/fixtures/", ".mock.", ".fixture."];
      const mockContentHints = ["mock", "Mock", "fixture", "stub", "export const"];
      return mockPathHints.some((hint) => file.includes(hint)) && mockContentHints.some((hint) => code.includes(hint));
    },
    severity: Severity.Info,
    reason: "Mock data duplication is expected for comprehensive test coverage",
    suggestion: "Consider shared factories only for complex mock generation"
  }
];
91
// Decide how serious a duplicate pair is. Context rules (test fixtures,
// templates, configs, ...) take precedence; otherwise severity is graded
// from the similarity score and the block's line count.
function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
  // A rule matching EITHER file downgrades the finding and records the rule name.
  for (const rule of CONTEXT_RULES) {
    if (rule.detect(file1, code) || rule.detect(file2, code)) {
      return {
        severity: rule.severity,
        reason: rule.reason,
        suggestion: rule.suggestion,
        matchedRule: rule.name
      };
    }
  }
  // Similarity tiers, most severe first. Near-identical blocks are split by
  // size: only substantial ones (>= 30 / >= 15 lines) escalate.
  if (similarity >= 0.95) {
    if (linesOfCode >= 30) {
      return {
        severity: Severity.Critical,
        reason: "Large nearly-identical code blocks waste tokens and create maintenance burden",
        suggestion: "Extract to shared utility module immediately"
      };
    }
    if (linesOfCode >= 15) {
      return {
        severity: Severity.Major,
        reason: "Nearly identical code should be consolidated",
        suggestion: "Move to shared utility file"
      };
    }
  }
  if (similarity >= 0.85) {
    return {
      severity: Severity.Major,
      reason: "High similarity indicates significant duplication",
      suggestion: "Extract common logic to shared function"
    };
  }
  if (similarity >= 0.7) {
    return {
      severity: Severity.Minor,
      reason: "Moderate similarity detected",
      suggestion: "Consider extracting shared patterns if code evolves together"
    };
  }
  // Anything below 0.7 is still reported, but as low priority.
  return {
    severity: Severity.Minor,
    reason: "Minor similarity detected",
    suggestion: "Monitor but refactoring may not be worthwhile"
  };
}
134
// Human-readable, emoji-prefixed label for a severity level.
// Returns undefined for severities outside the known set.
function getSeverityLabel(severity) {
  switch (severity) {
    case Severity.Critical:
      return "\u{1F534} CRITICAL";
    case Severity.Major:
      return "\u{1F7E1} MAJOR";
    case Severity.Minor:
      return "\u{1F535} MINOR";
    case Severity.Info:
      return "\u2139\uFE0F INFO";
    default:
      return undefined;
  }
}
143
// Keep only duplicates at or above `minSeverity`. An unrecognized
// `minSeverity` disables filtering and returns the input untouched.
function filterBySeverity(duplicates, minSeverity) {
  // Ascending rank; index position doubles as the severity rank.
  const order = [Severity.Info, Severity.Minor, Severity.Major, Severity.Critical];
  const threshold = order.indexOf(minSeverity);
  if (threshold === -1) return duplicates;
  // Duplicates with an unknown severity rank as -1 and are dropped.
  return duplicates.filter((dup) => order.indexOf(dup.severity) >= threshold);
}
157
+
158
+ // src/detector.ts
159
// Canonicalize a code snippet for similarity comparison: drop comments,
// collapse whitespace, and unify quote characters.
//
// Bug fix: comments are now stripped BEFORE whitespace is collapsed. The
// previous order collapsed newlines first, so the line-comment regex
// (/\/\/.*/, which only stops at a newline) deleted everything after the
// first "//" in the entire snippet.
function normalizeCode(code) {
  return code
    .replace(/\/\*[\s\S]*?\*\//g, "") // block comments (non-greedy, spans newlines)
    .replace(/\/\/.*/g, "") // line comments; newlines still present to bound the match
    .replace(/\s+/g, " ") // collapse all whitespace runs to a single space
    .replace(/['"]/g, '"') // unify quote style so 'x' and "x" compare equal
    .trim();
  // NOTE(review): the comment regexes also fire on "//" or "/*" inside
  // string literals — acceptable for fuzzy similarity, but worth confirming.
}
162
// Scan a file's text for top-level declarations (function/class/const/
// interface/type) and return one block descriptor per match, with 1-based
// start/end lines, the raw code slice, an estimated token count, and an
// inferred pattern type.
function extractBlocks(file, content) {
  const blocks = [];
  const lines = content.split("\n");
  // Matches an optionally exported / async declaration at line start.
  // Group 1 = declaration keyword, group 2 = identifier.
  const blockRegex = /^(?:export\s+)?(?:async\s+)?(function|class|const|interface|type)\s+([a-zA-Z0-9_]+)/gm;
  let match;
  while ((match = blockRegex.exec(content)) !== null) {
    // 1-based line number of the match: count newlines before match.index.
    const startLine = content.substring(0, match.index).split("\n").length;
    const type = match[1];
    // Fallback span if no balanced brace pair is found below.
    let endLine = startLine + 5;
    let openBraces = 0;
    let foundStart = false;
    // Walk forward counting braces until the first one that opened is closed.
    // NOTE(review): this counts every "{"/"}" character, including those in
    // string literals and comments, so the end line can be off for such code.
    for (let i = match.index; i < content.length; i++) {
      if (content[i] === "{") {
        openBraces++;
        foundStart = true;
      } else if (content[i] === "}") {
        openBraces--;
      }
      if (foundStart && openBraces === 0) {
        endLine = content.substring(0, i).split("\n").length;
        break;
      }
    }
    const blockCode = lines.slice(startLine - 1, endLine).join("\n");
    const tokens = estimateTokens(blockCode);
    blocks.push({
      file,
      startLine,
      endLine,
      code: blockCode,
      tokens,
      patternType: inferPatternType(type, match[2])
    });
  }
  return blocks;
}
198
// Classify a declaration from its keyword ("function", "class", ...) and
// identifier. Name-based checks win over keyword-based ones.
function inferPatternType(keyword, name) {
  const n = name.toLowerCase();
  if (n.includes("handler") || n.includes("controller")) return "api-handler";
  if (n.includes("validate") || n.includes("schema")) return "validator";
  if (n.includes("util") || n.includes("helper")) return "utility";
  if (keyword === "class") return "class-method";
  // Bug fix: test the ORIGINAL name for a leading capital. The previous code
  // ran /^[A-Z]/ against the lowercased copy `n`, so this branch could never
  // match and PascalCase components fell through to "function"/"unknown".
  if (name.match(/^[A-Z]/)) return "component";
  if (keyword === "function") return "function";
  return "unknown";
}
208
// Jaccard similarity over the space-separated word sets of two normalized
// snippets: |A ∩ B| / |A ∪ B|. Identical inputs short-circuit to 1.
function calculateSimilarity(a, b) {
  if (a === b) return 1;
  const wordsA = new Set(a.split(" "));
  const wordsB = new Set(b.split(" "));
  let shared = 0;
  for (const word of wordsA) {
    if (wordsB.has(word)) shared += 1;
  }
  // Inclusion-exclusion: |A ∪ B| = |A| + |B| - |A ∩ B|.
  const unionSize = wordsA.size + wordsB.size - shared;
  return shared / unionSize;
}
216
// Compare every pair of extracted blocks (across different files) and
// report pairs whose normalized Jaccard similarity reaches `minSimilarity`.
//
// Perf fix: normalizeCode() is pure and was being recomputed for both sides
// of every pair inside the O(n^2) loop; it is now computed once per block.
async function detectDuplicatePatterns(fileContents, options) {
  const { minSimilarity, minLines, streamResults } = options;
  // Gather blocks from every file, dropping those shorter than minLines.
  const allBlocks = [];
  for (const { file, content } of fileContents) {
    const blocks = extractBlocks(file, content);
    allBlocks.push(...blocks.filter((b) => b.endLine - b.startLine >= minLines));
  }
  // One normalized string per block, index-aligned with allBlocks.
  const normalized = allBlocks.map((b) => normalizeCode(b.code));
  const duplicates = [];
  for (let i = 0; i < allBlocks.length; i++) {
    for (let j = i + 1; j < allBlocks.length; j++) {
      const b1 = allBlocks[i];
      const b2 = allBlocks[j];
      // Intra-file duplication is out of scope here.
      if (b1.file === b2.file) continue;
      const sim = calculateSimilarity(normalized[i], normalized[j]);
      if (sim >= minSimilarity) {
        // NOTE(review): severity is graded on b1's line count and code only,
        // not b2's — confirm that asymmetry is intended.
        const { severity, reason, suggestion, matchedRule } = calculateSeverity(
          b1.file,
          b2.file,
          b1.code,
          sim,
          b1.endLine - b1.startLine
        );
        const dup = {
          file1: b1.file,
          line1: b1.startLine,
          endLine1: b1.endLine,
          file2: b2.file,
          line2: b2.startLine,
          endLine2: b2.endLine,
          code1: b1.code,
          code2: b2.code,
          similarity: sim,
          patternType: b1.patternType,
          tokenCost: b1.tokens + b2.tokens,
          severity,
          reason,
          suggestion,
          matchedRule
        };
        duplicates.push(dup);
        if (streamResults)
          console.log(
            `[DUPLICATE] ${dup.file1}:${dup.line1} <-> ${dup.file2}:${dup.line2} (${Math.round(sim * 100)}%)`
          );
      }
    }
  }
  return duplicates;
}
267
+
268
+ // src/grouping.ts
269
+ import { Severity as Severity3 } from "@aiready/core";
270
// Aggregate duplicate pairs by their (sorted) file pair: occurrence count,
// summed token cost, mean similarity, pattern types seen, line ranges, and
// the worst severity observed for that pair.
function groupDuplicatesByFilePair(duplicates) {
  // Numeric rank so severities can be compared; higher is worse.
  const severityRank = { [Severity3.Critical]: 3, [Severity3.Major]: 2, [Severity3.Minor]: 1, [Severity3.Info]: 0 };
  const groups = new Map();
  for (const dup of duplicates) {
    // Sort so (a, b) and (b, a) share one key.
    const key = [dup.file1, dup.file2].sort().join("::");
    let group = groups.get(key);
    if (group === undefined) {
      group = {
        filePair: key,
        severity: dup.severity,
        occurrences: 0,
        totalTokenCost: 0,
        averageSimilarity: 0, // running SUM until the final map() divides it
        patternTypes: new Set(),
        lineRanges: []
      };
      groups.set(key, group);
    }
    group.occurrences += 1;
    group.totalTokenCost += dup.tokenCost;
    group.averageSimilarity += dup.similarity;
    group.patternTypes.add(dup.patternType);
    group.lineRanges.push({
      file1: { start: dup.line1, end: dup.endLine1 },
      file2: { start: dup.line2, end: dup.endLine2 }
    });
    // Escalate the group's severity to the worst seen.
    if (severityRank[dup.severity] > severityRank[group.severity]) {
      group.severity = dup.severity;
    }
  }
  // Convert the running similarity sum into a true average.
  return Array.from(groups.values()).map((group) => ({
    ...group,
    averageSimilarity: group.averageSimilarity / group.occurrences
  }));
}
306
// Group files connected by duplication into clusters: build an undirected
// graph (edge = two files share at least one duplicate pair), then each
// connected component of size >= 2 becomes one cluster with aggregate stats.
function createRefactorClusters(duplicates) {
  // file -> Set of files it shares a duplicate with
  const adjacency = /* @__PURE__ */ new Map();
  // file -> running { tokenCost, similarity, count } accumulated per endpoint
  const fileData = /* @__PURE__ */ new Map();
  for (const dup of duplicates) {
    if (!adjacency.has(dup.file1)) adjacency.set(dup.file1, /* @__PURE__ */ new Set());
    if (!adjacency.has(dup.file2)) adjacency.set(dup.file2, /* @__PURE__ */ new Set());
    adjacency.get(dup.file1).add(dup.file2);
    adjacency.get(dup.file2).add(dup.file1);
    // Each pair contributes its cost/similarity to BOTH endpoints.
    [dup.file1, dup.file2].forEach((f) => {
      if (!fileData.has(f)) fileData.set(f, { tokenCost: 0, similarity: 0, count: 0 });
      const data = fileData.get(f);
      data.tokenCost += dup.tokenCost;
      data.similarity += dup.similarity;
      data.count++;
    });
  }
  const visited = /* @__PURE__ */ new Set();
  const clusters = [];
  // BFS from each unvisited file to collect its connected component.
  for (const file of adjacency.keys()) {
    if (visited.has(file)) continue;
    const component = [];
    const queue = [file];
    visited.add(file);
    while (queue.length > 0) {
      const curr = queue.shift();
      component.push(curr);
      for (const neighbor of adjacency.get(curr) || []) {
        if (!visited.has(neighbor)) {
          visited.add(neighbor);
          queue.push(neighbor);
        }
      }
    }
    // Singleton components carry no cross-file duplication; skip them.
    if (component.length >= 2) {
      let totalTokenCost = 0;
      let avgSimilarity = 0;
      let duplicateCount = 0;
      component.forEach((f) => {
        const data = fileData.get(f);
        totalTokenCost += data.tokenCost;
        avgSimilarity += data.similarity;
        duplicateCount += data.count;
      });
      // Similarity was summed once per endpoint, so dividing by the
      // endpoint count yields the per-endpoint mean.
      avgSimilarity = avgSimilarity / Math.max(1, duplicateCount);
      // Each pair was counted at both endpoints; halve to get pair count.
      // NOTE(review): totalTokenCost is NOT halved the same way, so
      // intra-component pairs are double-counted there — confirm intended.
      duplicateCount = duplicateCount / 2;
      const name = determineClusterName(component);
      const { severity, reason, suggestion } = calculateSeverity(
        component[0],
        component[1],
        "",
        // Code not available here
        avgSimilarity,
        30
        // Assume substantial if clustered
      );
      clusters.push({
        id: `cluster-${clusters.length}`,
        name,
        files: component,
        severity,
        duplicateCount,
        totalTokenCost,
        averageSimilarity: avgSimilarity,
        reason,
        suggestion
      });
    }
  }
  return clusters;
}
376
// Derive a display name for a cluster from its first file's parent
// directory, e.g. "src/auth/login.ts" -> "auth Domain Group".
function determineClusterName(files) {
  if (files.length === 0) return "Unknown Cluster";
  const segments = files[0].split("/");
  // No directory component to name the cluster after.
  if (segments.length < 2) return "Shared Pattern Group";
  const parentDir = segments[segments.length - 2];
  return `${parentDir} Domain Group`;
}
385
// Keep clusters worth acting on: either expensive in tokens OR spanning
// at least `minFiles` files (the conditions are OR'd, not AND'd).
function filterClustersByImpact(clusters, minTokenCost = 1e3, minFiles = 3) {
  const isImpactful = (cluster) =>
    cluster.totalTokenCost >= minTokenCost || cluster.files.length >= minFiles;
  return clusters.filter(isImpactful);
}
390
+
391
+ // src/scoring.ts
392
+ import {
393
+ calculateMonthlyCost,
394
+ calculateProductivityImpact,
395
+ DEFAULT_COST_CONFIG
396
+ } from "@aiready/core";
397
// Turn the duplicate list into a 0-100 readiness score plus explanatory
// factors, recommendations, and business-value metrics (monthly cost,
// developer hours). Higher score = less duplication.
function calculatePatternScore(duplicates, totalFilesAnalyzed, costConfig) {
  const totalDuplicates = duplicates.length;
  const totalTokenCost = duplicates.reduce((sum, d) => sum + d.tokenCost, 0);
  // "High impact" = expensive (>1000 tokens) OR quite similar (>70%).
  const highImpactDuplicates = duplicates.filter(
    (d) => d.tokenCost > 1e3 || d.similarity > 0.7
  ).length;
  // Nothing analyzed: report a perfect, empty score rather than divide by 0.
  if (totalFilesAnalyzed === 0) {
    return {
      toolName: "pattern-detect",
      score: 100,
      rawMetrics: {
        totalDuplicates: 0,
        totalTokenCost: 0,
        highImpactDuplicates: 0,
        totalFilesAnalyzed: 0
      },
      factors: [],
      recommendations: []
    };
  }
  // Density metrics, normalized per file (duplicates per 100 files).
  const duplicatesPerFile = totalDuplicates / totalFilesAnalyzed * 100;
  const tokenWastePerFile = totalTokenCost / totalFilesAnalyzed;
  // Penalty caps: density up to 60 points, token waste up to 40.
  const duplicatesPenalty = Math.min(60, duplicatesPerFile * 0.6);
  const tokenPenalty = Math.min(40, tokenWastePerFile / 125);
  // NOTE(review): with 1-2 high-impact duplicates this evaluates to a
  // NEGATIVE penalty (-3 / -1), i.e. a small bonus, and 0 such duplicates
  // give a -5 bonus — confirm the `* 2 - 5` gradient is intentional.
  const highImpactPenalty = highImpactDuplicates > 0 ? Math.min(15, highImpactDuplicates * 2 - 5) : -5;
  const score = 100 - duplicatesPenalty - tokenPenalty - highImpactPenalty;
  const finalScore = Math.max(0, Math.min(100, Math.round(score)));
  // Factors explain where the score went; impact is the (negated) penalty.
  const factors = [
    {
      name: "Duplication Density",
      impact: -Math.round(duplicatesPenalty),
      description: `${duplicatesPerFile.toFixed(1)} duplicates per 100 files`
    },
    {
      name: "Token Waste",
      impact: -Math.round(tokenPenalty),
      description: `${Math.round(tokenWastePerFile)} tokens wasted per file`
    }
  ];
  if (highImpactDuplicates > 0) {
    factors.push({
      name: "High-Impact Patterns",
      impact: -Math.round(highImpactPenalty),
      description: `${highImpactDuplicates} high-impact duplicates (>1000 tokens or >70% similar)`
    });
  } else {
    factors.push({
      name: "No High-Impact Patterns",
      impact: 5,
      description: "No severe duplicates detected"
    });
  }
  const recommendations = [];
  if (highImpactDuplicates > 0) {
    const estimatedImpact = Math.min(15, highImpactDuplicates * 3);
    recommendations.push({
      action: `Deduplicate ${highImpactDuplicates} high-impact pattern${highImpactDuplicates > 1 ? "s" : ""}`,
      estimatedImpact,
      priority: "high"
    });
  }
  if (totalDuplicates > 10 && duplicatesPerFile > 20) {
    const estimatedImpact = Math.min(10, Math.round(duplicatesPenalty * 0.3));
    recommendations.push({
      action: "Extract common patterns into shared utilities",
      estimatedImpact,
      priority: "medium"
    });
  }
  if (tokenWastePerFile > 2e3) {
    const estimatedImpact = Math.min(8, Math.round(tokenPenalty * 0.4));
    recommendations.push({
      action: "Consolidate duplicated logic to reduce AI context waste",
      estimatedImpact,
      priority: totalTokenCost > 1e4 ? "high" : "medium"
    });
  }
  // Business-value estimates; user costConfig overrides the defaults.
  const cfg = { ...DEFAULT_COST_CONFIG, ...costConfig };
  const estimatedMonthlyCost = calculateMonthlyCost(totalTokenCost, cfg);
  // Collapse severities to the three buckets calculateProductivityImpact expects.
  const issues = duplicates.map((d) => ({
    severity: d.severity === "critical" ? "critical" : d.severity === "major" ? "major" : "minor"
  }));
  const productivityImpact = calculateProductivityImpact(issues);
  return {
    toolName: "pattern-detect",
    score: finalScore,
    rawMetrics: {
      totalDuplicates,
      totalTokenCost,
      highImpactDuplicates,
      totalFilesAnalyzed,
      duplicatesPerFile: Math.round(duplicatesPerFile * 10) / 10,
      tokenWastePerFile: Math.round(tokenWastePerFile),
      // Business value metrics
      estimatedMonthlyCost,
      estimatedDeveloperHours: productivityImpact.totalHours
    },
    factors,
    recommendations
  };
}
498
+
499
+ // src/index.ts
500
// Refactoring advice for a duplicate, chosen by pattern type, with an
// urgency suffix appended for very high similarity.
function getRefactoringSuggestion(patternType, similarity) {
  const suggestionsByType = {
    "api-handler": "Extract common middleware or create a base handler class",
    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
    utility: "Move to a shared utilities file and reuse across modules",
    "class-method": "Consider inheritance or composition to share behavior",
    component: "Extract shared logic into a custom hook or HOC",
    function: "Extract into a shared helper function",
    unknown: "Extract common logic into a reusable module"
  };
  let urgency = "";
  if (similarity > 0.95) {
    urgency = " (CRITICAL: Nearly identical code)";
  } else if (similarity > 0.9) {
    urgency = " (HIGH: Very similar, refactor soon)";
  }
  return suggestionsByType[patternType] + urgency;
}
513
// Compute analysis options sized to the repository. When smart defaults are
// explicitly disabled, a fixed fallback config is returned; otherwise the
// repo is scanned, thresholds are derived from its estimated block count,
// and any explicitly-set user options override the derived values.
async function getSmartDefaults(directory, userOptions) {
  if (userOptions.useSmartDefaults === false) {
    // Fixed fallback configuration; no scan is performed.
    return {
      rootDir: directory,
      minSimilarity: 0.6,
      minLines: 8,
      batchSize: 100,
      approx: true,
      minSharedTokens: 12,
      maxCandidatesPerBlock: 5,
      streamResults: false,
      severity: "all",
      includeTests: false
    };
  }
  const scanOptions = {
    rootDir: directory,
    include: userOptions.include || ["**/*.{ts,tsx,js,jsx,py,java}"],
    exclude: userOptions.exclude
  };
  const { scanFiles } = await import("@aiready/core");
  const matchedFiles = await scanFiles(scanOptions);
  // Rough sizing heuristic: assume ~3 code blocks per file.
  const estimatedBlocks = matchedFiles.length * 3;
  const clamp = (value, lo, hi) => Math.max(lo, Math.min(hi, value));
  // Larger repos: fewer candidates per block, higher similarity bar,
  // longer minimum blocks, and more shared tokens required.
  const maxCandidatesPerBlock = clamp(Math.floor(30000 / estimatedBlocks), 3, 10);
  const minSimilarity = Math.min(0.75, 0.5 + estimatedBlocks / 10000 * 0.25);
  const minLines = clamp(6 + Math.floor(estimatedBlocks / 2000), 6, 12);
  const minSharedTokens = clamp(10 + Math.floor(estimatedBlocks / 2000), 10, 20);
  const batchSize = estimatedBlocks > 1000 ? 200 : 100;
  const severity = estimatedBlocks > 5000 ? "high" : "all";
  const defaults = {
    rootDir: directory,
    minSimilarity,
    minLines,
    batchSize,
    approx: true,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults: false,
    severity,
    includeTests: false
  };
  // User-supplied values win, but only for keys the defaults define and
  // only when the user actually set them (undefined is ignored).
  const result = { ...defaults };
  for (const key of Object.keys(defaults)) {
    if (key in userOptions && userOptions[key] !== undefined) {
      result[key] = userOptions[key];
    }
  }
  return result;
}
571
// Print the resolved configuration to the console, one line per setting.
// Suppressed entirely when config.suppressToolConfig is set.
function logConfiguration(config, estimatedBlocks) {
  if (config.suppressToolConfig) return;
  const lines = [
    "\u{1F4CB} Configuration:",
    ` Repository size: ~${estimatedBlocks} code blocks`,
    ` Similarity threshold: ${config.minSimilarity}`,
    ` Minimum lines: ${config.minLines}`,
    ` Approximate mode: ${config.approx ? "enabled" : "disabled"}`,
    ` Max candidates per block: ${config.maxCandidatesPerBlock}`,
    ` Min shared tokens: ${config.minSharedTokens}`,
    ` Severity filter: ${config.severity}`,
    ` Include tests: ${config.includeTests}`,
    ""
  ];
  for (const line of lines) {
    console.log(line);
  }
}
584
// Top-level entry point: resolve options (smart defaults overridden by the
// caller's explicit options), scan and read the target files, detect
// duplicate blocks, fold them into per-file results with severity-filtered
// issues, and optionally compute file-pair groups and refactor clusters.
async function analyzePatterns(options) {
  const smartDefaults = await getSmartDefaults(options.rootDir || ".", options);
  // Caller's options take precedence over the derived defaults.
  const finalOptions = { ...smartDefaults, ...options };
  const {
    minSimilarity = 0.4,
    minLines = 5,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    streamResults = false,
    severity = "all",
    includeTests = false,
    groupByFilePair = true,
    createClusters = true,
    minClusterTokenCost = 1e3,
    minClusterFiles = 3,
    ...scanOptions
  } = finalOptions;
  const { scanFiles: scanFiles2 } = await import("@aiready/core");
  const files = await scanFiles2(scanOptions);
  // Same ~3-blocks-per-file heuristic used by getSmartDefaults.
  const estimatedBlocks = files.length * 3;
  logConfiguration(finalOptions, estimatedBlocks);
  const results = [];
  // Read file contents in batches of 50 to bound concurrent reads.
  const BATCH_SIZE = 50;
  const fileContents = [];
  for (let i = 0; i < files.length; i += BATCH_SIZE) {
    const batch = files.slice(i, i + BATCH_SIZE);
    const batchContents = await Promise.all(
      batch.map(async (file) => ({
        file,
        content: await readFileContent(file)
      }))
    );
    fileContents.push(...batchContents);
  }
  const duplicates = await detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines,
    batchSize,
    approx,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults,
    onProgress: options.onProgress
  });
  // Build one result entry per scanned file (even files with no issues).
  for (const file of files) {
    // Every duplicate pair touching this file, from either side.
    const fileDuplicates = duplicates.filter(
      (dup) => dup.file1 === file || dup.file2 === file
    );
    const issues = fileDuplicates.map((dup) => {
      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
      // Issue severity is re-derived from similarity alone here, separate
      // from the context-rule severity stored on the duplicate itself.
      const severity2 = dup.similarity > 0.95 ? Severity4.Critical : dup.similarity > 0.9 ? Severity4.Major : Severity4.Minor;
      return {
        type: IssueType.DuplicatePattern,
        severity: severity2,
        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
        location: {
          file,
          line: dup.file1 === file ? dup.line1 : dup.line2
        },
        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
      };
    });
    let filteredIssues = issues;
    if (severity !== "all") {
      // Map the string filter to the set of severities it admits; unknown
      // filter values fall back to everything except Info.
      const severityMap = {
        critical: [Severity4.Critical],
        high: [Severity4.Critical, Severity4.Major],
        medium: [Severity4.Critical, Severity4.Major, Severity4.Minor]
      };
      const allowedSeverities = severityMap[severity] || [Severity4.Critical, Severity4.Major, Severity4.Minor];
      filteredIssues = issues.filter(
        (issue) => allowedSeverities.includes(issue.severity)
      );
    }
    const totalTokenCost = fileDuplicates.reduce(
      (sum, dup) => sum + dup.tokenCost,
      0
    );
    results.push({
      fileName: file,
      issues: filteredIssues,
      metrics: {
        tokenCost: totalTokenCost,
        // Simple linear score: each duplicate touching the file costs 0.1,
        // floored at 0.
        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
      }
    });
  }
  // Optional aggregate views; undefined when disabled.
  let groups;
  let clusters;
  if (groupByFilePair) {
    groups = groupDuplicatesByFilePair(duplicates);
  }
  if (createClusters) {
    const allClusters = createRefactorClusters(duplicates);
    clusters = filterClustersByImpact(
      allClusters,
      minClusterTokenCost,
      minClusterFiles
    );
  }
  return { results, duplicates, files, groups, clusters };
}
688
// Roll per-file results up into a summary: total pattern count, total token
// cost, a count per pattern type, and details for up to ten duplicates.
// Pattern type / similarity / token cost are recovered by parsing the
// formatted issue messages, since Issue does not carry them structurally.
function generateSummary(results) {
  const allIssues = results.flatMap((r) => r.issues);
  let totalTokenCost = 0;
  for (const r of results) {
    totalTokenCost += r.metrics.tokenCost || 0;
  }
  const patternsByType = {
    "api-handler": 0,
    validator: 0,
    utility: 0,
    "class-method": 0,
    component: 0,
    function: 0,
    unknown: 0
  };
  for (const issue of allIssues) {
    // Pattern type is the first token(s) of the message, before " pattern".
    const match = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    if (match) {
      const type = match[1];
      patternsByType[type] = (patternsByType[type] || 0) + 1;
    }
  }
  // "Top" here means the first ten issues in encounter order.
  const topDuplicates = [];
  for (const issue of allIssues.slice(0, 10)) {
    const similarityMatch = issue.message.match(/(\d+)% similar/);
    const tokenMatch = issue.message.match(/\((\d+) tokens/);
    const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    const fileMatch = issue.message.match(/similar to (.+?) \(/);
    topDuplicates.push({
      files: [
        {
          path: issue.location.file,
          startLine: issue.location.line,
          endLine: 0
          // Not available from Issue
        },
        {
          path: fileMatch?.[1] || "unknown",
          startLine: 0,
          // Not available from Issue
          endLine: 0
          // Not available from Issue
        }
      ],
      similarity: similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0,
      patternType: typeMatch?.[1] || "unknown",
      tokenCost: tokenMatch ? parseInt(tokenMatch[1]) : 0
    });
  }
  return {
    totalPatterns: allIssues.length,
    totalTokenCost,
    patternsByType,
    topDuplicates
  };
}
743
+
744
+ export {
745
+ calculateSeverity,
746
+ getSeverityLabel,
747
+ filterBySeverity,
748
+ detectDuplicatePatterns,
749
+ calculatePatternScore,
750
+ Severity4 as Severity,
751
+ getSmartDefaults,
752
+ analyzePatterns,
753
+ generateSummary
754
+ };