@aiready/pattern-detect 0.16.18 → 0.16.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,12 @@
1
+ import {
2
+ analyzePatterns,
3
+ generateSummary,
4
+ getSmartDefaults
5
+ } from "./chunk-WMOGJFME.mjs";
6
+ import "./chunk-THF4RW63.mjs";
7
+ import "./chunk-I6ETJC7L.mjs";
8
+ export {
9
+ analyzePatterns,
10
+ generateSummary,
11
+ getSmartDefaults
12
+ };
@@ -0,0 +1,179 @@
1
+ // src/context-rules.ts
2
+ import { Severity } from "@aiready/core";
3
// Context rules that override raw similarity-based severity when duplication
// is conventional or intentional (tests, templates, configs, migrations, ...).
// Each rule: { name, detect(file, code) -> boolean, severity, reason, suggestion }.
var CONTEXT_RULES = [
  // Test fixtures - duplication keeps each test isolated on purpose.
  {
    name: "test-fixtures",
    detect: (file, code) => {
      const inTestFile = [".test.", ".spec.", "__tests__", "/test/", "/tests/"].some(
        (marker) => file.includes(marker)
      );
      const usesFixtures = ["beforeAll", "afterAll", "beforeEach", "afterEach", "setUp", "tearDown"].some(
        (hook) => code.includes(hook)
      );
      return inTestFile && usesFixtures;
    },
    severity: Severity.Info,
    reason: "Test fixture duplication is intentional for test isolation",
    suggestion: "Consider if shared test setup would improve maintainability without coupling tests"
  },
  // Email/document templates - often intentionally similar for consistency.
  {
    name: "templates",
    detect: (file, code) => {
      const inTemplatePath = ["/templates/", "-template", "/email-templates/", "/emails/"].some(
        (marker) => file.includes(marker)
      );
      const emitsTemplate =
        ["return", "export"].some((keyword) => code.includes(keyword)) &&
        ["html", "subject", "body"].some((field) => code.includes(field));
      return inTemplatePath && emitsTemplate;
    },
    severity: Severity.Minor,
    reason: "Template duplication may be intentional for maintainability and branding consistency",
    suggestion: "Extract shared structure only if templates become hard to maintain"
  },
  // E2E/integration page objects - duplication preserves test independence.
  {
    name: "e2e-page-objects",
    detect: (file, code) => {
      const inE2EPath = [
        "e2e/",
        "/e2e/",
        ".e2e.",
        "/playwright/",
        "playwright/",
        "/cypress/",
        "cypress/",
        "/integration/",
        "integration/"
      ].some((marker) => file.includes(marker));
      const usesPageObjects = ["page.", "await page", "locator", "getBy", "selector", "click(", "fill("].some(
        (pattern) => code.includes(pattern)
      );
      return inE2EPath && usesPageObjects;
    },
    severity: Severity.Minor,
    reason: "E2E test duplication ensures test independence and reduces coupling",
    suggestion: "Consider page object pattern only if duplication causes maintenance issues"
  },
  // Configuration files - similar structure by design.
  {
    name: "config-files",
    detect: (file) =>
      [".config.ts", ".config.js"].some((suffix) => file.endsWith(suffix)) ||
      ["jest.config", "vite.config", "webpack.config", "rollup.config", "tsconfig"].some(
        (marker) => file.includes(marker)
      ),
    severity: Severity.Minor,
    reason: "Configuration files often have similar structure by design",
    suggestion: "Consider shared config base only if configurations become hard to maintain"
  },
  // Type definitions - duplicated for module independence and type safety.
  {
    name: "type-definitions",
    detect: (file, code) => {
      const inTypeFile = file.endsWith(".d.ts") || file.includes("/types/");
      const declaresTypes = ["interface ", "type ", "enum "].some((keyword) => code.includes(keyword));
      return inTypeFile && declaresTypes;
    },
    severity: Severity.Info,
    reason: "Type duplication may be intentional for module independence and type safety",
    suggestion: "Extract to shared types package only if causing maintenance burden"
  },
  // Migration scripts - one-off scripts that are similar by nature.
  {
    name: "migration-scripts",
    detect: (file) =>
      ["/migrations/", "/migrate/", ".migration."].some((marker) => file.includes(marker)),
    severity: Severity.Info,
    reason: "Migration scripts are typically one-off and intentionally similar",
    suggestion: "Duplication is acceptable for migration scripts"
  },
  // Mock data - test data is intentionally duplicated.
  {
    name: "mock-data",
    detect: (file, code) => {
      const inMockFile = ["/mocks/", "/__mocks__/", "/fixtures/", ".mock.", ".fixture."].some(
        (marker) => file.includes(marker)
      );
      const containsMockData = ["mock", "Mock", "fixture", "stub", "export const"].some(
        (pattern) => code.includes(pattern)
      );
      return inMockFile && containsMockData;
    },
    severity: Severity.Info,
    reason: "Mock data duplication is expected for comprehensive test coverage",
    suggestion: "Consider shared factories only for complex mock generation"
  },
  // Tool implementations - structural boilerplate around distinct logic.
  {
    name: "tool-implementations",
    detect: (file, code) => {
      const inToolFile = file.includes("/tools/") || file.endsWith(".tool.ts") || code.includes("toolDefinitions");
      const hasToolShape = code.includes("execute") && (code.includes("try") || code.includes("catch"));
      return inToolFile && hasToolShape;
    },
    severity: Severity.Info,
    reason: "Tool implementations share structural boilerplate but have distinct business logic",
    suggestion: "Tool duplication is acceptable for boilerplate interface wrappers"
  }
];
97
// Decide the severity of a duplicate pair. Context rules (tests, templates,
// configs, ...) take precedence over the raw similarity/size tiers; the first
// rule matching either file wins.
function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
  const matched = CONTEXT_RULES.find(
    (rule) => rule.detect(file1, code) || rule.detect(file2, code)
  );
  if (matched) {
    return {
      severity: matched.severity,
      reason: matched.reason,
      suggestion: matched.suggestion,
      matchedRule: matched.name
    };
  }
  // No contextual exemption: tier by similarity and block size.
  if (similarity >= 0.95 && linesOfCode >= 30) {
    return {
      severity: Severity.Critical,
      reason: "Large nearly-identical code blocks waste tokens and create maintenance burden",
      suggestion: "Extract to shared utility module immediately"
    };
  }
  if (similarity >= 0.95 && linesOfCode >= 15) {
    return {
      severity: Severity.Major,
      reason: "Nearly identical code should be consolidated",
      suggestion: "Move to shared utility file"
    };
  }
  if (similarity >= 0.85) {
    return {
      severity: Severity.Major,
      reason: "High similarity indicates significant duplication",
      suggestion: "Extract common logic to shared function"
    };
  }
  if (similarity >= 0.7) {
    return {
      severity: Severity.Minor,
      reason: "Moderate similarity detected",
      suggestion: "Consider extracting shared patterns if code evolves together"
    };
  }
  return {
    severity: Severity.Minor,
    reason: "Minor similarity detected",
    suggestion: "Monitor but refactoring may not be worthwhile"
  };
}
140
// Map a severity to its emoji-decorated display label.
// Returns undefined for severities outside the known set, like the original
// object lookup did.
function getSeverityLabel(severity) {
  switch (severity) {
    case Severity.Critical:
      return "\u{1F534} CRITICAL";
    case Severity.Major:
      return "\u{1F7E1} MAJOR";
    case Severity.Minor:
      return "\u{1F535} MINOR";
    case Severity.Info:
      return "\u2139\uFE0F INFO";
  }
}
149
// Keep only duplicates whose severity is at least `minSeverity`.
// An unrecognized minimum disables filtering; duplicates with an
// unrecognized severity are dropped (rank -1 never reaches the threshold),
// matching the original indexOf-based behavior.
function filterBySeverity(duplicates, minSeverity) {
  const rank = new Map([
    [Severity.Info, 0],
    [Severity.Minor, 1],
    [Severity.Major, 2],
    [Severity.Critical, 3]
  ]);
  const threshold = rank.get(minSeverity);
  if (threshold === undefined) return duplicates;
  return duplicates.filter((dup) => (rank.get(dup.severity) ?? -1) >= threshold);
}
163
// Minimum similarity required for a duplicate to reach the given severity.
// Info and any unknown severity fall through to 0 (no threshold), matching
// the original `thresholds[severity] || 0` lookup.
function getSeverityThreshold(severity) {
  switch (severity) {
    case Severity.Critical:
      return 0.95;
    case Severity.Major:
      return 0.85;
    case Severity.Minor:
      return 0.5;
    default:
      return 0;
  }
}
172
+
173
+ export {
174
+ CONTEXT_RULES,
175
+ calculateSeverity,
176
+ getSeverityLabel,
177
+ filterBySeverity,
178
+ getSeverityThreshold
179
+ };
@@ -0,0 +1,254 @@
1
+ import {
2
+ calculateSeverity
3
+ } from "./chunk-I6ETJC7L.mjs";
4
+
5
+ // src/detector.ts
6
+ import { estimateTokens } from "@aiready/core";
7
// Canonicalize source text for similarity comparison: strip comments
// (language-aware), unify quote characters, collapse whitespace, lowercase.
function normalizeCode(code, isPython = false) {
  const withoutComments = isPython
    ? code.replace(/#.*/g, "")
    : code.replace(/\/\/.*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
  return withoutComments
    .replace(/['"`]/g, '"') // treat ', " and ` as the same quote
    .replace(/\s+/g, " ") // any whitespace run becomes a single space
    .trim()
    .toLowerCase();
}
16
// Split a source file into named top-level blocks (functions, classes,
// interfaces, schema consts, express-style route registrations) that the
// duplicate detector compares pairwise. Python files use the
// indentation-based extractor instead.
function extractBlocks(file, content) {
  const isPython = file.toLowerCase().endsWith(".py");
  if (isPython) {
    return extractBlocksPython(file, content);
  }
  const blocks = [];
  const lines = content.split("\n");
  // Alternatives, anchored per line:
  //   1. modifier* (keyword-or-type-name) identifier followed by "(" or "{"
  //   2. `const X = <lib>.object(`  (validation-schema declarations)
  //   3. `app.<verb>(`             (route handlers)
  // FIX: the type-name branch was written `[a-zA-Z0-9_<>[]]+`, which the
  // regex engine parses as the class `[a-zA-Z0-9_<>[]` followed by one or
  // more LITERAL "]" characters, so only array-like names (e.g. `string[]`)
  // could match. Escaping the brackets (`\[\]`) restores the intended
  // "identifier, generic, or array type" match.
  const blockRegex = /^\s*(?:export\s+)?(?:async\s+)?(?:public\s+|private\s+|protected\s+|internal\s+|static\s+|readonly\s+|virtual\s+|abstract\s+|override\s+)*(function|class|interface|type|enum|record|struct|void|func|[a-zA-Z0-9_<>\[\]]+)\s+([a-zA-Z0-9_]+)(?:\s*\(|(?:\s+extends|\s+implements|\s+where)?\s*\{)|^\s*(?:export\s+)?const\s+([a-zA-Z0-9_]+)\s*=\s*[a-zA-Z0-9_.]+\.object\(|^\s*(app\.(?:get|post|put|delete|patch|use))\(/gm;
  let match;
  while ((match = blockRegex.exec(content)) !== null) {
    const startLine = content.substring(0, match.index).split("\n").length;
    // Which alternative matched determines the declaration kind and name.
    let type;
    let name;
    if (match[1]) {
      type = match[1];
      name = match[2];
    } else if (match[3]) {
      type = "const";
      name = match[3];
    } else {
      type = "handler";
      name = match[4];
    }
    // Scan forward to the brace that closes the block (naive counter; does
    // not account for braces inside strings/comments, same as before).
    let endLine = -1;
    let openBraces = 0;
    let foundStart = false;
    for (let i = match.index; i < content.length; i++) {
      if (content[i] === "{") {
        openBraces++;
        foundStart = true;
      } else if (content[i] === "}") {
        openBraces--;
      }
      if (foundStart && openBraces === 0) {
        endLine = content.substring(0, i + 1).split("\n").length;
        break;
      }
    }
    if (endLine === -1) {
      // No matched closing brace: treat as a one-line block, or run to EOF
      // when the declaration sits on the file's final line.
      const remaining = content.slice(match.index);
      const nextLineMatch = remaining.indexOf("\n");
      if (nextLineMatch !== -1) {
        endLine = startLine;
      } else {
        endLine = lines.length;
      }
    }
    endLine = Math.max(startLine, endLine);
    const blockCode = lines.slice(startLine - 1, endLine).join("\n");
    const tokens = estimateTokens(blockCode);
    blocks.push({
      file,
      startLine,
      endLine,
      code: blockCode,
      tokens,
      patternType: inferPatternType(type, name)
    });
  }
  return blocks;
}
77
// Python variant of extractBlocks: blocks are delimited by indentation rather
// than braces, so we walk lines after each def/class header until the indent
// drops back to (or below) the header's level.
function extractBlocksPython(file, content) {
  const blocks = [];
  const lines = content.split("\n");
  const defOrClass = /^\s*(?:async\s+)?(def|class)\s+([a-zA-Z0-9_]+)/gm;
  for (let match = defOrClass.exec(content); match !== null; match = defOrClass.exec(content)) {
    const startLine = content.substring(0, match.index).split("\n").length;
    const headerIdx = startLine - 1;
    const headerIndent = lines[headerIdx].search(/\S/);
    let lastIdx = headerIdx;
    for (let i = headerIdx + 1; i < lines.length; i++) {
      const line = lines[i];
      if (line.trim().length === 0) {
        // Blank lines are provisionally part of the block; trailing ones are
        // trimmed below.
        lastIdx = i;
        continue;
      }
      if (line.search(/\S/) <= headerIndent) {
        break;
      }
      lastIdx = i;
    }
    while (lastIdx > headerIdx && lines[lastIdx].trim().length === 0) {
      lastIdx--;
    }
    const blockCode = lines.slice(headerIdx, lastIdx + 1).join("\n");
    blocks.push({
      file,
      startLine,
      endLine: lastIdx + 1,
      code: blockCode,
      tokens: estimateTokens(blockCode),
      patternType: inferPatternType(match[1], match[2])
    });
  }
  return blocks;
}
115
// Classify an extracted block from its declaration keyword and name.
// Name checks are case-insensitive except the capitalization probe for
// components, which must inspect the original name.
function inferPatternType(keyword, name) {
  const n = name.toLowerCase();
  if (keyword === "handler" || n.includes("handler") || n.includes("controller") || n.startsWith("app.")) {
    return "api-handler";
  }
  if (n.includes("validate") || n.includes("schema")) return "validator";
  if (n.includes("util") || n.includes("helper")) return "utility";
  if (keyword === "class") return "class-method";
  // FIX: this previously tested the lowercased name (`n.match(/^[A-Z]/)`),
  // which can never match, so "component" was unreachable; test the
  // original-case name instead.
  if (name.match(/^[A-Z]/)) return "component";
  if (keyword === "function") return "function";
  return "unknown";
}
127
+ function calculateSimilarity(a, b) {
128
+ if (a === b) return 1;
129
+ const tokensA = a.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
130
+ const tokensB = b.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
131
+ if (tokensA.length === 0 || tokensB.length === 0) return 0;
132
+ const setA = new Set(tokensA);
133
+ const setB = new Set(tokensB);
134
+ const intersection = new Set([...setA].filter((x) => setB.has(x)));
135
+ const union = /* @__PURE__ */ new Set([...setA, ...setB]);
136
+ return intersection.size / union.size;
137
+ }
138
// Confidence = raw similarity nudged up for substantial blocks and down for
// tiny ones, clamped into [0, 1]. Adjustments are applied sequentially in
// the same order as before so floating-point results are bit-identical.
function calculateConfidence(similarity, tokens, lines) {
  let score = similarity;
  if (lines > 20) score += 0.05;
  if (tokens > 200) score += 0.05;
  if (lines < 5) score -= 0.1;
  return Math.min(1, Math.max(0, score));
}
145
// Pairwise near-duplicate detection over all extracted code blocks.
// Skips same-file pairs and whitelisted file pairs, periodically yields to
// the event loop, reports progress via onProgress, and returns the detected
// pairs sorted by descending similarity.
async function detectDuplicatePatterns(fileContents, options) {
  const {
    minSimilarity,
    minLines,
    streamResults,
    onProgress,
    excludePatterns = [],
    confidenceThreshold = 0,
    ignoreWhitelist = []
  } = options;
  const excludeRegexes = excludePatterns.map((p) => new RegExp(p, "i"));
  // Collect candidate blocks, dropping ones that are too short or match an
  // exclusion pattern.
  const candidates = [];
  for (const { file, content } of fileContents) {
    for (const block of extractBlocks(file, content)) {
      if (block.endLine - block.startLine + 1 < minLines) continue;
      if (excludeRegexes.some((regex) => regex.test(block.code))) continue;
      candidates.push(block);
    }
  }
  const totalBlocks = candidates.length;
  const totalComparisons = totalBlocks * (totalBlocks - 1) / 2;
  let comparisons = 0;
  if (onProgress) {
    onProgress(
      0,
      totalComparisons,
      `Starting duplicate detection on ${totalBlocks} blocks...`
    );
  }
  // A pair is whitelisted when a pattern appears in both paths, or when the
  // pattern names the exact pair in either order ("a::b").
  const isWhitelistedPair = (fileA, fileB) =>
    ignoreWhitelist.some(
      (pattern) =>
        (fileA.includes(pattern) && fileB.includes(pattern)) ||
        pattern === `${fileA}::${fileB}` ||
        pattern === `${fileB}::${fileA}`
    );
  const duplicates = [];
  for (let i = 0; i < totalBlocks; i++) {
    // Every 50 outer iterations, yield so callers stay responsive.
    if (i > 0 && i % 50 === 0) {
      await new Promise((resolve) => setImmediate(resolve));
      if (onProgress) {
        onProgress(
          comparisons,
          totalComparisons,
          `Analyzing blocks (${i}/${totalBlocks})...`
        );
      }
    }
    const first = candidates[i];
    const normFirst = normalizeCode(first.code, first.file.toLowerCase().endsWith(".py"));
    for (let j = i + 1; j < totalBlocks; j++) {
      comparisons++;
      const second = candidates[j];
      if (first.file === second.file) continue;
      if (isWhitelistedPair(first.file, second.file)) continue;
      const normSecond = normalizeCode(second.code, second.file.toLowerCase().endsWith(".py"));
      const sim = calculateSimilarity(normFirst, normSecond);
      if (sim < minSimilarity) continue;
      // Confidence and severity are derived from the first block's size,
      // as before.
      const firstLineCount = first.endLine - first.startLine + 1;
      const confidence = calculateConfidence(sim, first.tokens, firstLineCount);
      if (confidence < confidenceThreshold) continue;
      const { severity, reason, suggestion, matchedRule } = calculateSeverity(
        first.file,
        second.file,
        first.code,
        sim,
        firstLineCount
      );
      const dup = {
        file1: first.file,
        line1: first.startLine,
        endLine1: first.endLine,
        file2: second.file,
        line2: second.startLine,
        endLine2: second.endLine,
        code1: first.code,
        code2: second.code,
        similarity: sim,
        confidence,
        patternType: first.patternType,
        tokenCost: first.tokens + second.tokens,
        severity,
        reason,
        suggestion,
        matchedRule
      };
      duplicates.push(dup);
      if (streamResults) {
        console.log(
          `[DUPLICATE] ${dup.file1}:${dup.line1} <-> ${dup.file2}:${dup.line2} (${Math.round(sim * 100)}%, conf: ${Math.round(confidence * 100)}%)`
        );
      }
    }
  }
  if (onProgress) {
    onProgress(
      totalComparisons,
      totalComparisons,
      `Duplicate detection complete. Found ${duplicates.length} patterns.`
    );
  }
  return duplicates.sort((a, b) => b.similarity - a.similarity);
}
251
+
252
+ export {
253
+ detectDuplicatePatterns
254
+ };
@@ -0,0 +1,64 @@
1
+ import {
2
+ analyzePatterns
3
+ } from "./chunk-WMOGJFME.mjs";
4
+ import {
5
+ calculatePatternScore
6
+ } from "./chunk-WBBO35SC.mjs";
7
+
8
+ // src/index.ts
9
+ import { ToolRegistry, Severity } from "@aiready/core";
10
+
11
+ // src/provider.ts
12
+ import {
13
+ ToolName,
14
+ SpokeOutputSchema,
15
+ GLOBAL_SCAN_OPTIONS
16
+ } from "@aiready/core";
17
// Hub provider for pattern-detect: runs the analysis, shapes the result into
// the shared SpokeOutput schema, and scores the output.
var PatternDetectProvider = {
  id: ToolName.PatternDetect,
  alias: ["patterns", "duplicates", "duplication"],
  async analyze(options) {
    const results = await analyzePatterns(options);
    const totalIssues = results.results.reduce(
      (sum, r) => sum + r.issues.length,
      0
    );
    // Echo back only tool-specific config; global scan options are stripped,
    // except rootDir which is always kept.
    const config = Object.fromEntries(
      Object.entries(results.config).filter(
        ([key]) => !GLOBAL_SCAN_OPTIONS.includes(key) || key === "rootDir"
      )
    );
    return SpokeOutputSchema.parse({
      results: results.results,
      summary: {
        totalFiles: results.files.length,
        totalIssues,
        // Raw duplicates are kept so score() can use them directly.
        duplicates: results.duplicates,
        clusters: results.clusters,
        config
      },
      metadata: {
        toolName: ToolName.PatternDetect,
        version: "0.12.5",
        timestamp: new Date().toISOString()
      }
    });
  },
  score(output, options) {
    const duplicates = output.summary.duplicates || [];
    const totalFiles = output.summary.totalFiles || output.results.length;
    return calculatePatternScore(duplicates, totalFiles, options.costConfig);
  },
  defaultWeight: 22
};

// src/index.ts
ToolRegistry.register(PatternDetectProvider);
60
+
61
+ export {
62
+ PatternDetectProvider,
63
+ Severity
64
+ };
@@ -0,0 +1,112 @@
1
+ // src/scoring.ts
2
+ import {
3
+ calculateMonthlyCost,
4
+ calculateProductivityImpact,
5
+ DEFAULT_COST_CONFIG,
6
+ ToolName
7
+ } from "@aiready/core";
8
// Convert duplicate-detection results into a 0-100 health score with the
// factor breakdown and recommendations surfaced in reports.
//
// duplicates         - detected pairs (each with tokenCost, similarity, severity)
// totalFilesAnalyzed - number of files scanned; 0 yields a perfect score
// costConfig         - optional overrides merged over DEFAULT_COST_CONFIG
function calculatePatternScore(duplicates, totalFilesAnalyzed, costConfig) {
  const totalDuplicates = duplicates.length;
  const totalTokenCost = duplicates.reduce((sum, d) => sum + d.tokenCost, 0);
  // "High impact" = expensive (>1000 tokens) or very similar (>70%).
  const highImpactDuplicates = duplicates.filter(
    (d) => d.tokenCost > 1e3 || d.similarity > 0.7
  ).length;
  if (totalFilesAnalyzed === 0) {
    // Nothing analyzed: report a clean slate.
    return {
      toolName: ToolName.PatternDetect,
      score: 100,
      rawMetrics: {
        totalDuplicates: 0,
        totalTokenCost: 0,
        highImpactDuplicates: 0,
        totalFilesAnalyzed: 0
      },
      factors: [],
      recommendations: []
    };
  }
  const duplicatesPerFile = totalDuplicates / totalFilesAnalyzed * 100;
  const tokenWastePerFile = totalTokenCost / totalFilesAnalyzed;
  const duplicatesPenalty = Math.min(60, duplicatesPerFile * 0.6);
  const tokenPenalty = Math.min(40, tokenWastePerFile / 125);
  // NOTE(review): for 1-2 high-impact duplicates `n * 2 - 5` is negative, so
  // the "penalty" becomes a score bonus even though high-impact duplicates
  // exist; confirm whether `n * 2 + 5` was intended. Behavior preserved here.
  const highImpactPenalty = highImpactDuplicates > 0 ? Math.min(15, highImpactDuplicates * 2 - 5) : -5;
  const score = 100 - duplicatesPenalty - tokenPenalty - highImpactPenalty;
  const finalScore = Math.max(0, Math.min(100, Math.round(score)));
  const factors = [
    {
      name: "Duplication Density",
      impact: -Math.round(duplicatesPenalty),
      description: `${duplicatesPerFile.toFixed(1)} duplicates per 100 files`
    },
    {
      name: "Token Waste",
      impact: -Math.round(tokenPenalty),
      description: `${Math.round(tokenWastePerFile)} tokens wasted per file`
    }
  ];
  if (highImpactDuplicates > 0) {
    factors.push({
      name: "High-Impact Patterns",
      impact: -Math.round(highImpactPenalty),
      description: `${highImpactDuplicates} high-impact duplicates (>1000 tokens or >70% similar)`
    });
  } else {
    factors.push({
      name: "No High-Impact Patterns",
      impact: 5,
      description: "No severe duplicates detected"
    });
  }
  const recommendations = [];
  if (highImpactDuplicates > 0) {
    const estimatedImpact = Math.min(15, highImpactDuplicates * 3);
    recommendations.push({
      action: `Deduplicate ${highImpactDuplicates} high-impact pattern${highImpactDuplicates > 1 ? "s" : ""}`,
      estimatedImpact,
      priority: "high"
    });
  }
  if (totalDuplicates > 10 && duplicatesPerFile > 20) {
    const estimatedImpact = Math.min(10, Math.round(duplicatesPenalty * 0.3));
    recommendations.push({
      action: "Extract common patterns into shared utilities",
      estimatedImpact,
      priority: "medium"
    });
  }
  if (tokenWastePerFile > 2e3) {
    const estimatedImpact = Math.min(8, Math.round(tokenPenalty * 0.4));
    recommendations.push({
      action: "Consolidate duplicated logic to reduce AI context waste",
      estimatedImpact,
      priority: totalTokenCost > 1e4 ? "high" : "medium"
    });
  }
  const cfg = { ...DEFAULT_COST_CONFIG, ...costConfig };
  const estimatedMonthlyCost = calculateMonthlyCost(totalTokenCost, cfg);
  const issues = duplicates.map((d) => ({
    severity: d.severity === "critical" ? "critical" : d.severity === "major" ? "major" : "minor"
  }));
  const productivityImpact = calculateProductivityImpact(issues);
  return {
    // FIX: use the shared enum for consistency with the empty-input branch
    // above (previously a hard-coded "pattern-detect" literal).
    toolName: ToolName.PatternDetect,
    score: finalScore,
    rawMetrics: {
      totalDuplicates,
      totalTokenCost,
      highImpactDuplicates,
      totalFilesAnalyzed,
      duplicatesPerFile: Math.round(duplicatesPerFile * 10) / 10,
      tokenWastePerFile: Math.round(tokenWastePerFile),
      // Business value metrics
      estimatedMonthlyCost,
      estimatedDeveloperHours: productivityImpact.totalHours
    },
    factors,
    recommendations
  };
}
109
+
110
+ export {
111
+ calculatePatternScore
112
+ };