@aiready/pattern-detect 0.17.13 → 0.17.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,144 @@
1
+ // src/scoring.ts
2
+ import {
3
+ calculateMonthlyCost,
4
+ calculateProductivityImpact,
5
+ DEFAULT_COST_CONFIG,
6
+ ToolName
7
+ } from "@aiready/core";
8
/**
 * Converts raw duplicate-detection results into a 0-100 readiness score
 * with explanatory factors, prioritized recommendations, and business-value
 * metrics (estimated monthly cost, developer hours).
 *
 * @param duplicates         All duplicate findings produced by the detector.
 * @param totalFilesAnalyzed Number of files scanned; 0 yields a neutral perfect score.
 * @param costConfig         Optional overrides merged over DEFAULT_COST_CONFIG.
 * @returns A score report object consumed by the @aiready aggregator.
 */
function calculatePatternScore(duplicates, totalFilesAnalyzed, costConfig) {
  // Rule ids whose matches are considered acceptable duplication and are
  // excluded from scoring. Hoisted out of the filter callback so the array
  // is built once per call instead of once per duplicate.
  const acceptableRules = [
    // Logic rules (logic-rules.ts)
    "type-definitions",
    "utility-functions",
    "shared-hooks",
    "score-helpers",
    "visualization-handlers",
    "switch-helpers",
    "common-api-functions",
    "validation-functions",
    // Infrastructure rules (infra-rules.ts)
    "config-files",
    "migration-scripts",
    "tool-implementations",
    "cli-command-definitions",
    // Web rules (web-rules.ts)
    "templates",
    "common-ui-handlers",
    "nextjs-route-handlers",
    // Test rules (test-rules.ts)
    "test-fixtures",
    "e2e-page-objects",
    "mock-data"
  ];
  // Only actionable findings count toward the score: info-level and
  // rule-accepted duplicates are filtered out before any metric is computed.
  const actionableDuplicates = duplicates.filter((d) => {
    if (d.severity === "info") return false;
    if (d.matchedRule && acceptableRules.includes(d.matchedRule)) return false;
    return true;
  });
  const totalDuplicates = actionableDuplicates.length;
  const totalTokenCost = actionableDuplicates.reduce(
    (sum, d) => sum + d.tokenCost,
    0
  );
  // "High impact" = expensive (>1000 combined tokens) or very similar (>70%).
  const highImpactDuplicates = actionableDuplicates.filter(
    (d) => d.tokenCost > 1e3 || d.similarity > 0.7
  ).length;
  // Nothing analyzed: return a neutral perfect score instead of dividing by zero.
  if (totalFilesAnalyzed === 0) {
    return {
      toolName: ToolName.PatternDetect,
      score: 100,
      rawMetrics: {
        totalDuplicates: 0,
        totalTokenCost: 0,
        highImpactDuplicates: 0,
        totalFilesAnalyzed: 0
      },
      factors: [],
      recommendations: []
    };
  }
  // Density is expressed per 100 files; token waste per single file.
  const duplicatesPerFile = totalDuplicates / totalFilesAnalyzed * 100;
  const tokenWastePerFile = totalTokenCost / totalFilesAnalyzed;
  // Each penalty is individually capped (60 + 40 + 15 max) before the final clamp.
  const duplicatesPenalty = Math.min(60, duplicatesPerFile * 0.6);
  const tokenPenalty = Math.min(40, tokenWastePerFile / 125);
  // Ramp: -5 (a bonus) at zero high-impact findings, rising by 2 per finding
  // (2*n - 5), capped at 15. NOTE(review): for 1-2 findings this is still
  // negative, i.e. a small score bonus — confirm that is intended.
  const highImpactPenalty = highImpactDuplicates > 0 ? Math.min(15, highImpactDuplicates * 2 - 5) : -5;
  const score = 100 - duplicatesPenalty - tokenPenalty - highImpactPenalty;
  const finalScore = Math.max(0, Math.min(100, Math.round(score)));
  const factors = [
    {
      name: "Duplication Density",
      impact: -Math.round(duplicatesPenalty),
      description: `${duplicatesPerFile.toFixed(1)} duplicates per 100 files`
    },
    {
      name: "Token Waste",
      impact: -Math.round(tokenPenalty),
      description: `${Math.round(tokenWastePerFile)} tokens wasted per file`
    }
  ];
  if (highImpactDuplicates > 0) {
    factors.push({
      name: "High-Impact Patterns",
      impact: -Math.round(highImpactPenalty),
      description: `${highImpactDuplicates} high-impact duplicates (>1000 tokens or >70% similar)`
    });
  } else {
    factors.push({
      name: "No High-Impact Patterns",
      impact: 5,
      description: "No severe duplicates detected"
    });
  }
  const recommendations = [];
  if (highImpactDuplicates > 0) {
    const estimatedImpact = Math.min(15, highImpactDuplicates * 3);
    recommendations.push({
      action: `Deduplicate ${highImpactDuplicates} high-impact pattern${highImpactDuplicates > 1 ? "s" : ""}`,
      estimatedImpact,
      priority: "high"
    });
  }
  if (totalDuplicates > 10 && duplicatesPerFile > 20) {
    const estimatedImpact = Math.min(10, Math.round(duplicatesPenalty * 0.3));
    recommendations.push({
      action: "Extract common patterns into shared utilities",
      estimatedImpact,
      priority: "medium"
    });
  }
  if (tokenWastePerFile > 2e3) {
    const estimatedImpact = Math.min(8, Math.round(tokenPenalty * 0.4));
    recommendations.push({
      action: "Consolidate duplicated logic to reduce AI context waste",
      estimatedImpact,
      priority: totalTokenCost > 1e4 ? "high" : "medium"
    });
  }
  // Business-value metrics use the merged cost configuration.
  const cfg = { ...DEFAULT_COST_CONFIG, ...costConfig };
  const estimatedMonthlyCost = calculateMonthlyCost(totalTokenCost, cfg);
  // Productivity impact is computed over ALL duplicates (including filtered
  // ones), mirroring the original behavior; anything not critical/major is
  // folded into "minor".
  const issues = duplicates.map((d) => ({
    severity: d.severity === "critical" ? "critical" : d.severity === "major" ? "major" : "minor"
  }));
  const productivityImpact = calculateProductivityImpact(issues);
  return {
    // Consistency fix: use the ToolName enum here like the empty-scan branch
    // above does (this previously returned the raw string "pattern-detect").
    toolName: ToolName.PatternDetect,
    score: finalScore,
    rawMetrics: {
      totalDuplicates,
      totalTokenCost,
      highImpactDuplicates,
      totalFilesAnalyzed,
      duplicatesPerFile: Math.round(duplicatesPerFile * 10) / 10,
      tokenWastePerFile: Math.round(tokenWastePerFile),
      // Business value metrics
      estimatedMonthlyCost,
      estimatedDeveloperHours: productivityImpact.totalHours
    },
    factors,
    recommendations
  };
}
141
+
142
// Public API of the compiled scoring module.
export {
  calculatePatternScore
};
@@ -0,0 +1,143 @@
1
+ import {
2
+ calculateSeverity
3
+ } from "./chunk-KDXWIT6W.mjs";
4
+
5
+ // src/detector.ts
6
+ import {
7
+ calculateStringSimilarity,
8
+ calculateHeuristicConfidence,
9
+ extractCodeBlocks
10
+ } from "@aiready/core";
11
+
12
+ // src/core/normalizer.ts
13
/**
 * Produces a canonical form of a code snippet so that superficially different
 * copies compare as equal: comments removed, string literals collapsed to
 * STR placeholders, integer literals to NUM, whitespace squashed, and the
 * result lowercased.
 *
 * @param code     Raw source text; falsy input normalizes to "".
 * @param isPython When true, strips `#` comments instead of `//` and block comments.
 * @returns The normalized, lowercased snippet.
 */
function normalizeCode(code, isPython = false) {
  if (!code) return "";

  // Comments are removed first, using the language-appropriate syntax.
  // NOTE(review): the regexes are heuristic — a `//` inside a string literal
  // would also be stripped; acceptable since both sides of a comparison go
  // through the same normalization.
  let out = isPython
    ? code.replace(/#.*/g, "")
    : code.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");

  // Canonicalize literals, then numbers, then whitespace — same order as
  // the original single chained expression.
  out = out.replace(/"[^"]*"/g, '"STR"');
  out = out.replace(/'[^']*'/g, "'STR'");
  out = out.replace(/`[^`]*`/g, "`STR`");
  out = out.replace(/\b\d+\b/g, "NUM");
  out = out.replace(/\s+/g, " ");
  return out.trim().toLowerCase();
}
23
+
24
+ // src/detector.ts
25
// Thin delegation to the shared block extractor from @aiready/core; kept as
// a local indirection point for the detector.
function extractBlocks(file, content) {
  const blocks = extractCodeBlocks(file, content);
  return blocks;
}
28
// Thin delegation to the shared string-similarity metric from @aiready/core.
function calculateSimilarity(a, b) {
  const similarity = calculateStringSimilarity(a, b);
  return similarity;
}
31
// Thin delegation to the shared heuristic confidence estimator from @aiready/core.
function calculateConfidence(similarity, tokens, lines) {
  const confidence = calculateHeuristicConfidence(similarity, tokens, lines);
  return confidence;
}
34
/**
 * Pairwise-compares every extracted code block across files and reports pairs
 * whose normalized similarity meets `minSimilarity`, sorted most-similar first.
 *
 * @param fileContents Array of { file, content } entries to scan.
 * @param options      minSimilarity, minLines, streamResults, onProgress,
 *                     excludePatterns (regex strings, case-insensitive),
 *                     confidenceThreshold (default 0),
 *                     ignoreWhitelist (substrings or "fileA::fileB" pair keys).
 * @returns Promise resolving to the duplicate findings, sorted by similarity desc.
 */
async function detectDuplicatePatterns(fileContents, options) {
  const {
    minSimilarity,
    minLines,
    streamResults,
    onProgress,
    excludePatterns = [],
    confidenceThreshold = 0,
    ignoreWhitelist = []
  } = options;
  // Collect candidate blocks, dropping ones that are too short or whose code
  // matches a caller-supplied exclusion pattern.
  const allBlocks = [];
  const excludeRegexes = excludePatterns.map((p) => new RegExp(p, "i"));
  for (const { file, content } of fileContents) {
    const blocks = extractBlocks(file, content);
    for (const b of blocks) {
      if (b.endLine - b.startLine + 1 < minLines) continue;
      const isExcluded = excludeRegexes.some((regex) => regex.test(b.code));
      if (isExcluded) continue;
      allBlocks.push(b);
    }
  }
  // Perf fix: normalize each block exactly once up front. The original
  // recomputed normalizeCode(b2.code, ...) inside the inner pair loop,
  // i.e. O(n^2) normalizations for n blocks; normalizeCode is pure, so
  // hoisting cannot change results.
  const normalized = allBlocks.map(
    (b) => normalizeCode(b.code, b.file.toLowerCase().endsWith(".py"))
  );
  const duplicates = [];
  const totalBlocks = allBlocks.length;
  let comparisons = 0;
  const totalComparisons = totalBlocks * (totalBlocks - 1) / 2;
  if (onProgress) {
    onProgress(
      0,
      totalComparisons,
      `Starting duplicate detection on ${totalBlocks} blocks...`
    );
  }
  for (let i = 0; i < allBlocks.length; i++) {
    // Yield to the event loop every 50 outer iterations so long scans do not
    // starve other work, and surface progress while we're at it.
    if (i % 50 === 0 && i > 0) {
      await new Promise((resolve) => setImmediate(resolve));
      if (onProgress) {
        onProgress(
          comparisons,
          totalComparisons,
          `Analyzing blocks (${i}/${totalBlocks})...`
        );
      }
    }
    const b1 = allBlocks[i];
    const norm1 = normalized[i];
    for (let j = i + 1; j < allBlocks.length; j++) {
      comparisons++;
      const b2 = allBlocks[j];
      // Intra-file repetition is out of scope; only cross-file pairs count.
      if (b1.file === b2.file) continue;
      // A whitelist entry suppresses a pair when both file paths contain it,
      // or when it names the exact pair in either "a::b" order.
      const isWhitelisted = ignoreWhitelist.some((pattern) => {
        return b1.file.includes(pattern) && b2.file.includes(pattern) || pattern === `${b1.file}::${b2.file}` || pattern === `${b2.file}::${b1.file}`;
      });
      if (isWhitelisted) continue;
      const sim = calculateSimilarity(norm1, normalized[j]);
      if (sim >= minSimilarity) {
        // Confidence is derived from the first block's size/similarity only.
        const confidence = calculateConfidence(
          sim,
          b1.tokens,
          b1.endLine - b1.startLine + 1
        );
        if (confidence < confidenceThreshold) continue;
        const { severity, reason, suggestion, matchedRule } = calculateSeverity(
          b1.file,
          b2.file,
          b1.code,
          sim,
          b1.endLine - b1.startLine + 1
        );
        const dup = {
          file1: b1.file,
          line1: b1.startLine,
          endLine1: b1.endLine,
          file2: b2.file,
          line2: b2.startLine,
          endLine2: b2.endLine,
          code1: b1.code,
          code2: b2.code,
          similarity: sim,
          confidence,
          patternType: b1.patternType,
          tokenCost: b1.tokens + b2.tokens,
          severity,
          reason,
          suggestion,
          matchedRule
        };
        duplicates.push(dup);
        if (streamResults)
          console.log(
            `[DUPLICATE] ${dup.file1}:${dup.line1} <-> ${dup.file2}:${dup.line2} (${Math.round(sim * 100)}%, conf: ${Math.round(confidence * 100)}%)`
          );
      }
    }
  }
  if (onProgress) {
    onProgress(
      totalComparisons,
      totalComparisons,
      `Duplicate detection complete. Found ${duplicates.length} patterns.`
    );
  }
  // In-place sort of the local array: most similar pairs first.
  return duplicates.sort((a, b) => b.similarity - a.similarity);
}
140
+
141
// Public API of the compiled detector module.
export {
  detectDuplicatePatterns
};