@aiready/pattern-detect 0.17.8 → 0.17.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190) hide show
  1. package/dist/analyzer-entry/index.d.mts +1 -1
  2. package/dist/analyzer-entry/index.d.ts +1 -1
  3. package/dist/analyzer-entry/index.js +370 -135
  4. package/dist/analyzer-entry/index.mjs +4 -3
  5. package/dist/chunk-2P7BQHGR.mjs +306 -0
  6. package/dist/{chunk-VGMM3L3O.mjs → chunk-3EORD7DC.mjs} +1 -1
  7. package/dist/{chunk-GREN7X5H.mjs → chunk-4PVPQMRT.mjs} +2 -2
  8. package/dist/{chunk-RS73WLNI.mjs → chunk-6VDL7TAS.mjs} +5 -113
  9. package/dist/chunk-AQIP4JGM.mjs +283 -0
  10. package/dist/{chunk-JBUZ6YHE.mjs → chunk-B4NLWKPZ.mjs} +85 -9
  11. package/dist/chunk-IPBGVPUX.mjs +143 -0
  12. package/dist/chunk-LUUJOUK5.mjs +283 -0
  13. package/dist/chunk-P3BOCGVV.mjs +498 -0
  14. package/dist/{scoring-entry.js → chunk-PHJE6A3J.mjs} +20 -37
  15. package/dist/chunk-PQXOORR4.mjs +234 -0
  16. package/dist/{chunk-GLKAGFKX.mjs → chunk-RDR75DVI.mjs} +85 -9
  17. package/dist/chunk-SXVLRPMF.mjs +143 -0
  18. package/dist/{chunk-DNZS4ESD.mjs → chunk-SY7RX5YQ.mjs} +85 -9
  19. package/dist/{context-rules-entry.js → chunk-TIBF7KST.mjs} +81 -78
  20. package/dist/chunk-WYYSQX5M.mjs +467 -0
  21. package/dist/{chunk-I6ETJC7L.mjs → chunk-X553BOMI.mjs} +56 -26
  22. package/dist/{chunk-K7BO57OO.mjs → chunk-Y6OB7K34.mjs} +80 -4
  23. package/dist/chunk-YLVV6YZ5.mjs +143 -0
  24. package/dist/chunk-ZUWPFVJV.mjs +115 -0
  25. package/dist/chunk-ZZMONVPE.mjs +467 -0
  26. package/dist/cli.js +402 -167
  27. package/dist/cli.mjs +4 -3
  28. package/dist/context-rules-entry/index.d.mts +35 -1
  29. package/dist/context-rules-entry/index.d.ts +35 -1
  30. package/dist/context-rules-entry/index.js +194 -48
  31. package/dist/context-rules-entry/index.mjs +1 -1
  32. package/dist/detector-entry/index.js +192 -46
  33. package/dist/detector-entry/index.mjs +2 -2
  34. package/dist/{analyzer-entry-BVz-HnZd.d.mts → index-B-pnXpgn.d.mts} +10 -1
  35. package/dist/{index-BwuoiCNm.d.ts → index-CWgYOKaK.d.ts} +35 -16
  36. package/dist/{index-BVz-HnZd.d.mts → index-Dl4BrGIT.d.mts} +35 -16
  37. package/dist/{analyzer-entry-BwuoiCNm.d.ts → index-DqS2e0kK.d.ts} +10 -1
  38. package/dist/index.d.mts +5 -6
  39. package/dist/index.d.ts +5 -6
  40. package/dist/index.js +467 -214
  41. package/dist/index.mjs +37 -22
  42. package/dist/scoring-entry/index.js +7 -3
  43. package/dist/scoring-entry/index.mjs +1 -1
  44. package/package.json +2 -2
  45. package/dist/analyzer-entry.d.mts +0 -100
  46. package/dist/analyzer-entry.d.ts +0 -100
  47. package/dist/analyzer-entry.js +0 -693
  48. package/dist/analyzer-entry.mjs +0 -12
  49. package/dist/chunk-262N2JB7.mjs +0 -497
  50. package/dist/chunk-2R7HOR5H.mjs +0 -777
  51. package/dist/chunk-3D7RVGHM.mjs +0 -64
  52. package/dist/chunk-3LS3E6MO.mjs +0 -508
  53. package/dist/chunk-3VRQYFW3.mjs +0 -782
  54. package/dist/chunk-3WK24ZOX.mjs +0 -860
  55. package/dist/chunk-3YYN6ZXN.mjs +0 -1038
  56. package/dist/chunk-4BPRGZRG.mjs +0 -1041
  57. package/dist/chunk-4UHDGB7U.mjs +0 -920
  58. package/dist/chunk-5LYDB7DY.mjs +0 -771
  59. package/dist/chunk-65G3HXLQ.mjs +0 -497
  60. package/dist/chunk-65UQ5J2J.mjs +0 -64
  61. package/dist/chunk-6JTVOBJX.mjs +0 -64
  62. package/dist/chunk-6OEHUI5J.mjs +0 -1045
  63. package/dist/chunk-6YUGU4P4.mjs +0 -914
  64. package/dist/chunk-7EJGNGXM.mjs +0 -771
  65. package/dist/chunk-7O2DUBSN.mjs +0 -1058
  66. package/dist/chunk-7S4AUL5S.mjs +0 -911
  67. package/dist/chunk-A76JUWER.mjs +0 -786
  68. package/dist/chunk-AJZUNNFH.mjs +0 -817
  69. package/dist/chunk-AXHGYYYZ.mjs +0 -404
  70. package/dist/chunk-BKRPSTT2.mjs +0 -64
  71. package/dist/chunk-BUBQ3W6W.mjs +0 -980
  72. package/dist/chunk-CCHM2VLK.mjs +0 -1051
  73. package/dist/chunk-CHFK6EBT.mjs +0 -419
  74. package/dist/chunk-CMT3MWWO.mjs +0 -948
  75. package/dist/chunk-CMWW24HW.mjs +0 -259
  76. package/dist/chunk-CTDBJP25.mjs +0 -1043
  77. package/dist/chunk-DGAKXYIP.mjs +0 -1041
  78. package/dist/chunk-DQSLTL7J.mjs +0 -788
  79. package/dist/chunk-DR5W7S3Z.mjs +0 -968
  80. package/dist/chunk-EFUKPMBE.mjs +0 -950
  81. package/dist/chunk-EVBFDILL.mjs +0 -927
  82. package/dist/chunk-EXORBAXR.mjs +0 -887
  83. package/dist/chunk-EZT3NZGB.mjs +0 -1057
  84. package/dist/chunk-FWUKMJEQ.mjs +0 -1133
  85. package/dist/chunk-GSJFORRO.mjs +0 -504
  86. package/dist/chunk-H4ADJYOG.mjs +0 -925
  87. package/dist/chunk-H5FB2USZ.mjs +0 -762
  88. package/dist/chunk-H73HEG7M.mjs +0 -670
  89. package/dist/chunk-HOS5Z2NC.mjs +0 -669
  90. package/dist/chunk-HXHQOQB5.mjs +0 -508
  91. package/dist/chunk-INEOYHUM.mjs +0 -911
  92. package/dist/chunk-INJ4SBTV.mjs +0 -754
  93. package/dist/chunk-J5CW6NYY.mjs +0 -64
  94. package/dist/chunk-JAFZCZAP.mjs +0 -776
  95. package/dist/chunk-JKVKOXYR.mjs +0 -407
  96. package/dist/chunk-JTHW7EYW.mjs +0 -1041
  97. package/dist/chunk-JWR3AHKO.mjs +0 -788
  98. package/dist/chunk-KC2CQMG2.mjs +0 -858
  99. package/dist/chunk-KDWGWBP5.mjs +0 -832
  100. package/dist/chunk-KPEK5REL.mjs +0 -919
  101. package/dist/chunk-KT6O2IAE.mjs +0 -861
  102. package/dist/chunk-KWMNN3TG.mjs +0 -391
  103. package/dist/chunk-LUA5FXSZ.mjs +0 -771
  104. package/dist/chunk-LYKRYBSM.mjs +0 -64
  105. package/dist/chunk-M4PQMW34.mjs +0 -480
  106. package/dist/chunk-MH6LBXZF.mjs +0 -816
  107. package/dist/chunk-MHU3CL4R.mjs +0 -64
  108. package/dist/chunk-MJWBS6SM.mjs +0 -1058
  109. package/dist/chunk-OFGMDX66.mjs +0 -402
  110. package/dist/chunk-P7B6Z4I2.mjs +0 -1043
  111. package/dist/chunk-PBCXSG7E.mjs +0 -658
  112. package/dist/chunk-PEEHSFDR.mjs +0 -1058
  113. package/dist/chunk-PSVG2NLH.mjs +0 -966
  114. package/dist/chunk-PWNQ6JZW.mjs +0 -508
  115. package/dist/chunk-QE4E3F7C.mjs +0 -410
  116. package/dist/chunk-QEP76HGK.mjs +0 -1039
  117. package/dist/chunk-QX2BQJEO.mjs +0 -1058
  118. package/dist/chunk-R2S73CVG.mjs +0 -503
  119. package/dist/chunk-RMGDSNLE.mjs +0 -770
  120. package/dist/chunk-S2KQFII2.mjs +0 -491
  121. package/dist/chunk-SLDK5PQK.mjs +0 -1129
  122. package/dist/chunk-SNSDVGWW.mjs +0 -783
  123. package/dist/chunk-SUUZMLPS.mjs +0 -391
  124. package/dist/chunk-SVCSIZ2A.mjs +0 -259
  125. package/dist/chunk-T2C6WS73.mjs +0 -670
  126. package/dist/chunk-TCG2G32F.mjs +0 -911
  127. package/dist/chunk-TGBZP7SB.mjs +0 -773
  128. package/dist/chunk-THF4RW63.mjs +0 -254
  129. package/dist/chunk-TJKDLVLN.mjs +0 -503
  130. package/dist/chunk-TXWPOVYU.mjs +0 -402
  131. package/dist/chunk-UB3CGOQ7.mjs +0 -64
  132. package/dist/chunk-UKIKN27B.mjs +0 -950
  133. package/dist/chunk-V5DP4FP6.mjs +0 -876
  134. package/dist/chunk-VRMXVYDZ.mjs +0 -419
  135. package/dist/chunk-WACZ5LFH.mjs +0 -1055
  136. package/dist/chunk-WC7CBAA7.mjs +0 -1058
  137. package/dist/chunk-WKBCNITM.mjs +0 -1072
  138. package/dist/chunk-WMOGJFME.mjs +0 -391
  139. package/dist/chunk-X4GR2N2M.mjs +0 -947
  140. package/dist/chunk-XCWY2DQY.mjs +0 -788
  141. package/dist/chunk-XJD35DS6.mjs +0 -1058
  142. package/dist/chunk-XNPID6FU.mjs +0 -391
  143. package/dist/chunk-XUUVS54V.mjs +0 -776
  144. package/dist/chunk-YCGV65F5.mjs +0 -508
  145. package/dist/chunk-YJYDBFT3.mjs +0 -780
  146. package/dist/chunk-YP3HEDQW.mjs +0 -859
  147. package/dist/chunk-YSDOUNJJ.mjs +0 -1142
  148. package/dist/chunk-Z6GBFFOV.mjs +0 -1040
  149. package/dist/cli.d.ts.map +0 -1
  150. package/dist/cli.js.map +0 -1
  151. package/dist/context-rules-entry-y2uJSngh.d.mts +0 -60
  152. package/dist/context-rules-entry-y2uJSngh.d.ts +0 -60
  153. package/dist/context-rules-entry.d.mts +0 -55
  154. package/dist/context-rules-entry.d.ts +0 -55
  155. package/dist/context-rules-entry.mjs +0 -12
  156. package/dist/context-rules.d.ts +0 -41
  157. package/dist/context-rules.d.ts.map +0 -1
  158. package/dist/context-rules.js +0 -225
  159. package/dist/context-rules.js.map +0 -1
  160. package/dist/detector-entry.d.mts +0 -14
  161. package/dist/detector-entry.d.ts +0 -14
  162. package/dist/detector-entry.js +0 -301
  163. package/dist/detector-entry.mjs +0 -7
  164. package/dist/detector.d.ts +0 -40
  165. package/dist/detector.d.ts.map +0 -1
  166. package/dist/detector.js +0 -385
  167. package/dist/detector.js.map +0 -1
  168. package/dist/extractors/python-extractor.d.ts +0 -19
  169. package/dist/extractors/python-extractor.d.ts.map +0 -1
  170. package/dist/extractors/python-extractor.js +0 -164
  171. package/dist/extractors/python-extractor.js.map +0 -1
  172. package/dist/grouping.d.ts +0 -54
  173. package/dist/grouping.d.ts.map +0 -1
  174. package/dist/grouping.js +0 -347
  175. package/dist/grouping.js.map +0 -1
  176. package/dist/index-y2uJSngh.d.mts +0 -60
  177. package/dist/index-y2uJSngh.d.ts +0 -60
  178. package/dist/index.d.ts.map +0 -1
  179. package/dist/index.js.map +0 -1
  180. package/dist/python-extractor-BGKGX6BK.mjs +0 -131
  181. package/dist/python-extractor-ELAKYK2W.mjs +0 -140
  182. package/dist/scoring-entry.d.mts +0 -23
  183. package/dist/scoring-entry.d.ts +0 -23
  184. package/dist/scoring-entry.mjs +0 -6
  185. package/dist/scoring.d.ts +0 -12
  186. package/dist/scoring.d.ts.map +0 -1
  187. package/dist/scoring.js +0 -116
  188. package/dist/scoring.js.map +0 -1
  189. package/dist/types-C4lmb2Yh.d.mts +0 -36
  190. package/dist/types-C4lmb2Yh.d.ts +0 -36
@@ -1,1142 +0,0 @@
1
- // src/index.ts
2
- import { readFileContent } from "@aiready/core";
3
-
4
- // src/detector.ts
5
- import { estimateTokens } from "@aiready/core";
6
-
7
- // src/context-rules.ts
8
- var CONTEXT_RULES = [
9
- // Test Fixtures - Intentional duplication for test isolation
10
- {
11
- name: "test-fixtures",
12
- detect: (file, code) => {
13
- const isTestFile = file.includes(".test.") || file.includes(".spec.") || file.includes("__tests__") || file.includes("/test/") || file.includes("/tests/");
14
- const hasTestFixtures = code.includes("beforeAll") || code.includes("afterAll") || code.includes("beforeEach") || code.includes("afterEach") || code.includes("setUp") || code.includes("tearDown");
15
- return isTestFile && hasTestFixtures;
16
- },
17
- severity: "info",
18
- reason: "Test fixture duplication is intentional for test isolation",
19
- suggestion: "Consider if shared test setup would improve maintainability without coupling tests"
20
- },
21
- // Email/Document Templates - Often intentionally similar for consistency
22
- {
23
- name: "templates",
24
- detect: (file, code) => {
25
- const isTemplate = file.includes("/templates/") || file.includes("-template") || file.includes("/email-templates/") || file.includes("/emails/");
26
- const hasTemplateContent = (code.includes("return") || code.includes("export")) && (code.includes("html") || code.includes("subject") || code.includes("body"));
27
- return isTemplate && hasTemplateContent;
28
- },
29
- severity: "minor",
30
- reason: "Template duplication may be intentional for maintainability and branding consistency",
31
- suggestion: "Extract shared structure only if templates become hard to maintain"
32
- },
33
- // E2E/Integration Test Page Objects - Test independence
34
- {
35
- name: "e2e-page-objects",
36
- detect: (file, code) => {
37
- const isE2ETest = file.includes("e2e/") || file.includes("/e2e/") || file.includes(".e2e.") || file.includes("/playwright/") || file.includes("playwright/") || file.includes("/cypress/") || file.includes("cypress/") || file.includes("/integration/") || file.includes("integration/");
38
- const hasPageObjectPatterns = code.includes("page.") || code.includes("await page") || code.includes("locator") || code.includes("getBy") || code.includes("selector") || code.includes("click(") || code.includes("fill(");
39
- return isE2ETest && hasPageObjectPatterns;
40
- },
41
- severity: "minor",
42
- reason: "E2E test duplication ensures test independence and reduces coupling",
43
- suggestion: "Consider page object pattern only if duplication causes maintenance issues"
44
- },
45
- // Configuration Files - Often necessarily similar by design
46
- {
47
- name: "config-files",
48
- detect: (file) => {
49
- return file.endsWith(".config.ts") || file.endsWith(".config.js") || file.includes("jest.config") || file.includes("vite.config") || file.includes("webpack.config") || file.includes("rollup.config") || file.includes("tsconfig");
50
- },
51
- severity: "minor",
52
- reason: "Configuration files often have similar structure by design",
53
- suggestion: "Consider shared config base only if configurations become hard to maintain"
54
- },
55
- // Type Definitions - Duplication for type safety and module independence
56
- {
57
- name: "type-definitions",
58
- detect: (file, code) => {
59
- const isTypeFile = file.endsWith(".d.ts") || file.includes("/types/");
60
- const hasTypeDefinitions = code.includes("interface ") || code.includes("type ") || code.includes("enum ");
61
- return isTypeFile && hasTypeDefinitions;
62
- },
63
- severity: "info",
64
- reason: "Type duplication may be intentional for module independence and type safety",
65
- suggestion: "Extract to shared types package only if causing maintenance burden"
66
- },
67
- // Migration Scripts - One-off scripts that are similar by nature
68
- {
69
- name: "migration-scripts",
70
- detect: (file) => {
71
- return file.includes("/migrations/") || file.includes("/migrate/") || file.includes(".migration.");
72
- },
73
- severity: "info",
74
- reason: "Migration scripts are typically one-off and intentionally similar",
75
- suggestion: "Duplication is acceptable for migration scripts"
76
- },
77
- // Mock Data - Test data intentionally duplicated
78
- {
79
- name: "mock-data",
80
- detect: (file, code) => {
81
- const isMockFile = file.includes("/mocks/") || file.includes("/__mocks__/") || file.includes("/fixtures/") || file.includes(".mock.") || file.includes(".fixture.");
82
- const hasMockData = code.includes("mock") || code.includes("Mock") || code.includes("fixture") || code.includes("stub") || code.includes("export const");
83
- return isMockFile && hasMockData;
84
- },
85
- severity: "info",
86
- reason: "Mock data duplication is expected for comprehensive test coverage",
87
- suggestion: "Consider shared factories only for complex mock generation"
88
- }
89
- ];
90
- function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
91
- for (const rule of CONTEXT_RULES) {
92
- if (rule.detect(file1, code) || rule.detect(file2, code)) {
93
- return {
94
- severity: rule.severity,
95
- reason: rule.reason,
96
- suggestion: rule.suggestion,
97
- matchedRule: rule.name
98
- };
99
- }
100
- }
101
- if (similarity >= 0.95 && linesOfCode >= 30) {
102
- return {
103
- severity: "critical",
104
- reason: "Large nearly-identical code blocks waste tokens and create maintenance burden",
105
- suggestion: "Extract to shared utility module immediately"
106
- };
107
- } else if (similarity >= 0.95 && linesOfCode >= 15) {
108
- return {
109
- severity: "major",
110
- reason: "Nearly identical code should be consolidated",
111
- suggestion: "Move to shared utility file"
112
- };
113
- } else if (similarity >= 0.85) {
114
- return {
115
- severity: "major",
116
- reason: "High similarity indicates significant duplication",
117
- suggestion: "Extract common logic to shared function"
118
- };
119
- } else if (similarity >= 0.7) {
120
- return {
121
- severity: "minor",
122
- reason: "Moderate similarity detected",
123
- suggestion: "Consider extracting shared patterns if code evolves together"
124
- };
125
- } else {
126
- return {
127
- severity: "minor",
128
- reason: "Minor similarity detected",
129
- suggestion: "Monitor but refactoring may not be worthwhile"
130
- };
131
- }
132
- }
133
- function getSeverityLabel(severity) {
134
- const labels = {
135
- critical: "\u{1F534} CRITICAL",
136
- major: "\u{1F7E1} MAJOR",
137
- minor: "\u{1F535} MINOR",
138
- info: "\u2139\uFE0F INFO"
139
- };
140
- return labels[severity];
141
- }
142
- function filterBySeverity(duplicates, minSeverity) {
143
- const severityOrder = ["info", "minor", "major", "critical"];
144
- const minIndex = severityOrder.indexOf(minSeverity);
145
- if (minIndex === -1) return duplicates;
146
- return duplicates.filter((dup) => {
147
- const dupIndex = severityOrder.indexOf(dup.severity);
148
- return dupIndex >= minIndex;
149
- });
150
- }
151
-
152
- // src/detector.ts
153
- function categorizePattern(code) {
154
- const lower = code.toLowerCase();
155
- if (lower.includes("request") && lower.includes("response") || lower.includes("router.") || lower.includes("app.get") || lower.includes("app.post") || lower.includes("express") || lower.includes("ctx.body")) {
156
- return "api-handler";
157
- }
158
- if (lower.includes("validate") || lower.includes("schema") || lower.includes("zod") || lower.includes("yup") || lower.includes("if") && lower.includes("throw")) {
159
- return "validator";
160
- }
161
- if (lower.includes("return (") || lower.includes("jsx") || lower.includes("component") || lower.includes("props")) {
162
- return "component";
163
- }
164
- if (lower.includes("class ") || lower.includes("this.")) {
165
- return "class-method";
166
- }
167
- if (lower.includes("return ") && !lower.includes("this") && !lower.includes("new ")) {
168
- return "utility";
169
- }
170
- if (lower.includes("function") || lower.includes("=>")) {
171
- return "function";
172
- }
173
- return "unknown";
174
- }
175
- function extractCodeBlocks(content, minLines) {
176
- const lines = content.split("\n");
177
- const blocks = [];
178
- let currentBlock = [];
179
- let blockStart = 0;
180
- let braceDepth = 0;
181
- let inFunction = false;
182
- for (let i = 0; i < lines.length; i++) {
183
- const line = lines[i];
184
- const trimmed = line.trim();
185
- if (!inFunction && (trimmed.includes("function ") || trimmed.includes("=>") || trimmed.includes("async ") || /^(export\s+)?(async\s+)?function\s+/.test(trimmed) || /^(export\s+)?const\s+\w+\s*=\s*(async\s*)?\(/.test(trimmed))) {
186
- inFunction = true;
187
- blockStart = i;
188
- }
189
- for (const char of line) {
190
- if (char === "{") braceDepth++;
191
- if (char === "}") braceDepth--;
192
- }
193
- if (inFunction) {
194
- currentBlock.push(line);
195
- }
196
- if (inFunction && braceDepth === 0 && currentBlock.length >= minLines) {
197
- const blockContent = currentBlock.join("\n");
198
- const linesOfCode = currentBlock.filter(
199
- (l) => l.trim() && !l.trim().startsWith("//")
200
- ).length;
201
- blocks.push({
202
- content: blockContent,
203
- startLine: blockStart + 1,
204
- endLine: i + 1,
205
- patternType: categorizePattern(blockContent),
206
- linesOfCode
207
- });
208
- currentBlock = [];
209
- inFunction = false;
210
- } else if (inFunction && braceDepth === 0) {
211
- currentBlock = [];
212
- inFunction = false;
213
- }
214
- }
215
- return blocks;
216
- }
217
- function normalizeCode(code) {
218
- if (!code) {
219
- return "";
220
- }
221
- return code.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "").replace(/"[^"]*"/g, '"STR"').replace(/'[^']*'/g, "'STR'").replace(/`[^`]*`/g, "`STR`").replace(/\b\d+\b/g, "NUM").replace(/\s+/g, " ").trim();
222
- }
223
- function jaccardSimilarity(tokens1, tokens2) {
224
- const set1 = new Set(tokens1);
225
- const set2 = new Set(tokens2);
226
- let intersection = 0;
227
- for (const token of set1) {
228
- if (set2.has(token)) intersection++;
229
- }
230
- const union = set1.size + set2.size - intersection;
231
- return union === 0 ? 0 : intersection / union;
232
- }
233
- async function detectDuplicatePatterns(files, options) {
234
- const {
235
- minSimilarity,
236
- minLines,
237
- batchSize = 100,
238
- approx = true,
239
- minSharedTokens = 8,
240
- maxCandidatesPerBlock = 100,
241
- streamResults = false
242
- } = options;
243
- const duplicates = [];
244
- const maxComparisons = approx ? Infinity : 5e5;
245
- const allBlocks = files.flatMap(
246
- (file) => extractCodeBlocks(file.content, minLines).filter((block) => block.content && block.content.trim().length > 0).map((block) => ({
247
- content: block.content,
248
- startLine: block.startLine,
249
- endLine: block.endLine,
250
- file: file.file,
251
- normalized: normalizeCode(block.content),
252
- patternType: block.patternType,
253
- tokenCost: estimateTokens(block.content),
254
- linesOfCode: block.linesOfCode
255
- }))
256
- );
257
- if (!options.onProgress) {
258
- console.log(`Extracted ${allBlocks.length} code blocks for analysis`);
259
- }
260
- const pythonFiles = files.filter((f) => f.file.toLowerCase().endsWith(".py"));
261
- if (pythonFiles.length > 0) {
262
- const { extractPythonPatterns } = await import("./python-extractor-ELAKYK2W.mjs");
263
- const patterns = await extractPythonPatterns(
264
- pythonFiles.map((f) => f.file)
265
- );
266
- const pythonBlocks = patterns.filter((p) => p.code && p.code.trim().length > 0).map((p) => ({
267
- content: p.code,
268
- startLine: p.startLine,
269
- endLine: p.endLine,
270
- file: p.file,
271
- normalized: normalizeCode(p.code),
272
- patternType: p.type,
273
- tokenCost: estimateTokens(p.code),
274
- linesOfCode: p.endLine - p.startLine + 1
275
- }));
276
- allBlocks.push(...pythonBlocks);
277
- if (!options.onProgress) {
278
- console.log(`Added ${pythonBlocks.length} Python patterns`);
279
- }
280
- }
281
- if (!approx && allBlocks.length > 500) {
282
- console.log(
283
- `\u26A0\uFE0F Using --no-approx mode with ${allBlocks.length} blocks may be slow (O(B\xB2) complexity).`
284
- );
285
- console.log(
286
- ` Consider using approximate mode (default) for better performance.`
287
- );
288
- }
289
- const stopwords = /* @__PURE__ */ new Set([
290
- "return",
291
- "const",
292
- "let",
293
- "var",
294
- "function",
295
- "class",
296
- "new",
297
- "if",
298
- "else",
299
- "for",
300
- "while",
301
- "async",
302
- "await",
303
- "try",
304
- "catch",
305
- "switch",
306
- "case",
307
- "default",
308
- "import",
309
- "export",
310
- "from",
311
- "true",
312
- "false",
313
- "null",
314
- "undefined",
315
- "this"
316
- ]);
317
- const tokenize = (norm) => {
318
- const punctuation = "(){}[];.,";
319
- const cleaned = norm.split("").map((ch) => punctuation.includes(ch) ? " " : ch).join("");
320
- return cleaned.split(/\s+/).filter((t) => t && t.length >= 3 && !stopwords.has(t.toLowerCase()));
321
- };
322
- const blockTokens = allBlocks.map((b) => tokenize(b.normalized));
323
- const invertedIndex = /* @__PURE__ */ new Map();
324
- if (approx) {
325
- for (let i = 0; i < blockTokens.length; i++) {
326
- for (const tok of blockTokens[i]) {
327
- let arr = invertedIndex.get(tok);
328
- if (!arr) {
329
- arr = [];
330
- invertedIndex.set(tok, arr);
331
- }
332
- arr.push(i);
333
- }
334
- }
335
- }
336
- const totalComparisons = approx ? void 0 : allBlocks.length * (allBlocks.length - 1) / 2;
337
- if (totalComparisons !== void 0) {
338
- console.log(
339
- `Processing ${totalComparisons.toLocaleString()} comparisons in batches...`
340
- );
341
- } else {
342
- console.log(
343
- `Using approximate candidate selection to reduce comparisons...`
344
- );
345
- }
346
- let comparisonsProcessed = 0;
347
- let comparisonsBudgetExhausted = false;
348
- const startTime = Date.now();
349
- for (let i = 0; i < allBlocks.length; i++) {
350
- if (maxComparisons && comparisonsProcessed >= maxComparisons) {
351
- comparisonsBudgetExhausted = true;
352
- break;
353
- }
354
- if (i % batchSize === 0 && i > 0) {
355
- if (options.onProgress) {
356
- options.onProgress(i, allBlocks.length, `pattern-detect: analyzing blocks`);
357
- } else {
358
- const elapsed = ((Date.now() - startTime) / 1e3).toFixed(1);
359
- const duplicatesFound = duplicates.length;
360
- if (totalComparisons !== void 0) {
361
- const progress = (comparisonsProcessed / totalComparisons * 100).toFixed(1);
362
- const remaining = totalComparisons - comparisonsProcessed;
363
- const rate = comparisonsProcessed / parseFloat(elapsed);
364
- const eta = remaining > 0 ? (remaining / rate).toFixed(0) : 0;
365
- console.log(
366
- ` ${progress}% (${comparisonsProcessed.toLocaleString()}/${totalComparisons.toLocaleString()} comparisons, ${elapsed}s elapsed, ~${eta}s remaining, ${duplicatesFound} duplicates)`
367
- );
368
- } else {
369
- console.log(
370
- ` Processed ${i.toLocaleString()}/${allBlocks.length} blocks (${elapsed}s elapsed, ${duplicatesFound} duplicates)`
371
- );
372
- }
373
- }
374
- await new Promise((resolve) => setImmediate(resolve));
375
- }
376
- const block1 = allBlocks[i];
377
- let candidates = null;
378
- if (approx) {
379
- const counts = /* @__PURE__ */ new Map();
380
- const block1Tokens = new Set(blockTokens[i]);
381
- const block1Size = block1Tokens.size;
382
- const rareTokens = blockTokens[i].filter((tok) => {
383
- const blocksWithToken = invertedIndex.get(tok)?.length || 0;
384
- return blocksWithToken < allBlocks.length * 0.1;
385
- });
386
- for (const tok of rareTokens) {
387
- const ids = invertedIndex.get(tok);
388
- if (!ids) continue;
389
- for (const j of ids) {
390
- if (j <= i) continue;
391
- if (allBlocks[j].file === block1.file) continue;
392
- counts.set(j, (counts.get(j) || 0) + 1);
393
- }
394
- }
395
- candidates = Array.from(counts.entries()).filter(([j, shared]) => {
396
- const block2Tokens = blockTokens[j];
397
- const block2Size = block2Tokens.length;
398
- const minSize = Math.min(block1Size, block2Size);
399
- const sharedPercentage = shared / minSize;
400
- return shared >= minSharedTokens && sharedPercentage >= 0.3;
401
- }).sort((a, b) => b[1] - a[1]).slice(0, Math.min(maxCandidatesPerBlock, 5)).map(([j, shared]) => ({ j, shared }));
402
- }
403
- if (approx && candidates) {
404
- for (const { j } of candidates) {
405
- if (!approx && maxComparisons !== Infinity && comparisonsProcessed >= maxComparisons) {
406
- console.log(
407
- `\u26A0\uFE0F Comparison safety limit reached (${maxComparisons.toLocaleString()} comparisons in --no-approx mode).`
408
- );
409
- console.log(
410
- ` This prevents excessive runtime on large repos. Consider using approximate mode (default) or --min-lines to reduce blocks.`
411
- );
412
- break;
413
- }
414
- comparisonsProcessed++;
415
- const block2 = allBlocks[j];
416
- const similarity = jaccardSimilarity(blockTokens[i], blockTokens[j]);
417
- if (similarity >= minSimilarity) {
418
- const { severity, reason, suggestion, matchedRule } = calculateSeverity(
419
- block1.file,
420
- block2.file,
421
- block1.content,
422
- similarity,
423
- block1.linesOfCode
424
- );
425
- const duplicate = {
426
- file1: block1.file,
427
- file2: block2.file,
428
- line1: block1.startLine,
429
- line2: block2.startLine,
430
- endLine1: block1.endLine,
431
- endLine2: block2.endLine,
432
- similarity,
433
- snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
434
- patternType: block1.patternType,
435
- tokenCost: block1.tokenCost + block2.tokenCost,
436
- linesOfCode: block1.linesOfCode,
437
- severity,
438
- reason,
439
- suggestion,
440
- matchedRule
441
- };
442
- duplicates.push(duplicate);
443
- if (streamResults) {
444
- console.log(
445
- `
446
- \u2705 Found: ${duplicate.patternType} ${Math.round(similarity * 100)}% similar`
447
- );
448
- console.log(
449
- ` ${duplicate.file1}:${duplicate.line1}-${duplicate.endLine1} \u21D4 ${duplicate.file2}:${duplicate.line2}-${duplicate.endLine2}`
450
- );
451
- console.log(
452
- ` Token cost: ${duplicate.tokenCost.toLocaleString()}`
453
- );
454
- }
455
- }
456
- }
457
- } else {
458
- for (let j = i + 1; j < allBlocks.length; j++) {
459
- if (maxComparisons && comparisonsProcessed >= maxComparisons) break;
460
- comparisonsProcessed++;
461
- const block2 = allBlocks[j];
462
- if (block1.file === block2.file) continue;
463
- const similarity = jaccardSimilarity(blockTokens[i], blockTokens[j]);
464
- if (similarity >= minSimilarity) {
465
- const { severity, reason, suggestion, matchedRule } = calculateSeverity(
466
- block1.file,
467
- block2.file,
468
- block1.content,
469
- similarity,
470
- block1.linesOfCode
471
- );
472
- const duplicate = {
473
- file1: block1.file,
474
- file2: block2.file,
475
- line1: block1.startLine,
476
- line2: block2.startLine,
477
- endLine1: block1.endLine,
478
- endLine2: block2.endLine,
479
- similarity,
480
- snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
481
- patternType: block1.patternType,
482
- tokenCost: block1.tokenCost + block2.tokenCost,
483
- linesOfCode: block1.linesOfCode,
484
- severity,
485
- reason,
486
- suggestion,
487
- matchedRule
488
- };
489
- duplicates.push(duplicate);
490
- if (streamResults) {
491
- console.log(
492
- `
493
- \u2705 Found: ${duplicate.patternType} ${Math.round(similarity * 100)}% similar`
494
- );
495
- console.log(
496
- ` ${duplicate.file1}:${duplicate.line1}-${duplicate.endLine1} \u21D4 ${duplicate.file2}:${duplicate.line2}-${duplicate.endLine2}`
497
- );
498
- console.log(
499
- ` Token cost: ${duplicate.tokenCost.toLocaleString()}`
500
- );
501
- }
502
- }
503
- }
504
- }
505
- }
506
- if (comparisonsBudgetExhausted) {
507
- console.log(
508
- `\u26A0\uFE0F Comparison budget exhausted (${maxComparisons.toLocaleString()} comparisons). Use --max-comparisons to increase.`
509
- );
510
- }
511
- return duplicates.sort(
512
- (a, b) => b.similarity - a.similarity || b.tokenCost - a.tokenCost
513
- );
514
- }
515
-
516
- // src/grouping.ts
517
- function normalizeFilePair(file1, file2) {
518
- return file1 < file2 ? `${file1}::${file2}` : `${file2}::${file1}`;
519
- }
520
- function rangesOverlap(start1, end1, start2, end2, tolerance = 5) {
521
- return start1 <= end2 + tolerance && start2 <= end1 + tolerance;
522
- }
523
- function groupDuplicatesByFilePair(duplicates) {
524
- const groups = /* @__PURE__ */ new Map();
525
- for (const dup of duplicates) {
526
- const key = normalizeFilePair(dup.file1, dup.file2);
527
- if (!groups.has(key)) {
528
- groups.set(key, []);
529
- }
530
- groups.get(key).push(dup);
531
- }
532
- const result = [];
533
- for (const [filePair, groupDups] of groups.entries()) {
534
- const deduplicated = deduplicateOverlappingRanges(groupDups);
535
- const totalTokenCost = deduplicated.reduce(
536
- (sum, d) => sum + d.tokenCost,
537
- 0
538
- );
539
- const averageSimilarity = deduplicated.reduce((sum, d) => sum + d.similarity, 0) / deduplicated.length;
540
- const maxSimilarity = Math.max(...deduplicated.map((d) => d.similarity));
541
- const severity = getHighestSeverity(deduplicated.map((d) => d.severity));
542
- const patternType = getMostCommonPatternType(deduplicated);
543
- const lineRanges = deduplicated.map((d) => ({
544
- file1: { start: d.line1, end: d.endLine1 },
545
- file2: { start: d.line2, end: d.endLine2 }
546
- }));
547
- result.push({
548
- filePair,
549
- duplicates: deduplicated,
550
- totalTokenCost,
551
- averageSimilarity,
552
- maxSimilarity,
553
- severity,
554
- patternType,
555
- occurrences: deduplicated.length,
556
- lineRanges
557
- });
558
- }
559
- return result.sort((a, b) => b.totalTokenCost - a.totalTokenCost);
560
- }
561
/**
 * Collapse duplicate records whose line ranges overlap in BOTH files.
 * Overlapping neighbors are merged into one record covering the union of
 * the ranges, keeping the larger tokenCost. Input is not mutated.
 */
function deduplicateOverlappingRanges(duplicates) {
  if (duplicates.length === 0) return [];
  // Process in ascending file-1 start order; ties prefer higher similarity
  // so the merged record keeps the strongest match's metadata.
  const ordered = [...duplicates].sort(
    (a, b) => a.line1 - b.line1 || b.similarity - a.similarity
  );
  const merged = [];
  for (const candidate of ordered) {
    const last = merged[merged.length - 1];
    const collides =
      last !== undefined &&
      rangesOverlap(last.line1, last.endLine1, candidate.line1, candidate.endLine1) &&
      rangesOverlap(last.line2, last.endLine2, candidate.line2, candidate.endLine2);
    if (collides) {
      // Extend the previous record in place of pushing a new one.
      merged[merged.length - 1] = {
        ...last,
        endLine1: Math.max(last.endLine1, candidate.endLine1),
        endLine2: Math.max(last.endLine2, candidate.endLine2),
        tokenCost: Math.max(last.tokenCost, candidate.tokenCost)
      };
    } else {
      merged.push(candidate);
    }
  }
  return merged;
}
602
/**
 * Group duplicates into named refactor clusters and aggregate their metrics.
 * Clusters with fewer than two members are dropped; the result is sorted by
 * total token cost, highest first.
 */
function createRefactorClusters(duplicates) {
  // Bucket every duplicate under the cluster id it belongs to.
  const byCluster = new Map();
  for (const dup of duplicates) {
    const id = identifyCluster(dup);
    const bucket = byCluster.get(id);
    if (bucket) {
      bucket.push(dup);
    } else {
      byCluster.set(id, [dup]);
    }
  }
  const clusters = [];
  for (const [id, members] of byCluster) {
    // A single duplicate is not a cluster worth reporting.
    if (members.length < 2) continue;
    const files = getUniqueFiles(members);
    const totalTokenCost = members.reduce((sum, d) => sum + d.tokenCost, 0);
    const averageSimilarity =
      members.reduce((sum, d) => sum + d.similarity, 0) / members.length;
    const severity = getHighestSeverity(members.map((d) => d.severity));
    const patternType = getMostCommonPatternType(members);
    const { name, suggestion, reason } = getClusterInfo(id, patternType, files.length);
    clusters.push({
      id,
      name,
      files,
      patternType,
      severity,
      totalTokenCost,
      averageSimilarity,
      duplicateCount: members.length,
      suggestion,
      reason
    });
  }
  // Highest token waste first.
  return clusters.sort((a, b) => b.totalTokenCost - a.totalTokenCost);
}
635
/**
 * Map a duplicate record onto a refactor-cluster id based on where the two
 * files live and what kind of pattern was detected.
 *
 * Fix: removed leftover `console.log` debug statements that polluted stdout
 * for every component-pair comparison.
 */
function identifyCluster(dup) {
  const file1 = dup.file1.toLowerCase();
  const file2 = dup.file2.toLowerCase();
  // True when the (lowercased) path lives under any of the given directories.
  const inAnyDir = (path, dirs) =>
    dirs.some((d) => path.includes(`/${d}/`) || path.startsWith(`${d}/`));

  if (inAnyDir(file1, ["blog", "articles"]) && inAnyDir(file2, ["blog", "articles"])) {
    return "blog-seo-boilerplate";
  }
  if (
    inAnyDir(file1, ["components"]) &&
    inAnyDir(file2, ["components"]) &&
    dup.patternType === "component"
  ) {
    const component1 = extractComponentName(dup.file1);
    const component2 = extractComponentName(dup.file2);
    if (component1 && component2 && areSimilarComponents(component1, component2)) {
      return `component-${getComponentCategory(component1)}`;
    }
  }
  // E2E tests may also be flagged by a ".e2e." infix rather than a directory.
  const isE2e = (path) => inAnyDir(path, ["e2e"]) || path.includes(".e2e.");
  if (isE2e(file1) && isE2e(file2)) {
    return "e2e-test-patterns";
  }
  if (dup.patternType === "api-handler") {
    return "api-handlers";
  }
  if (dup.patternType === "validator") {
    return "validators";
  }
  if (inAnyDir(file1, ["scripts", "infra"]) && inAnyDir(file2, ["scripts", "infra"])) {
    return "infrastructure-scripts";
  }
  // Fallback: cluster by detected pattern type.
  return `${dup.patternType}-patterns`;
}
667
/**
 * Pull a PascalCase component name from a source path, e.g.
 * "src/components/Button.tsx" -> "Button". Returns null when the base
 * name does not look like a component file.
 */
function extractComponentName(filePath) {
  const COMPONENT_FILE = /[/\\]?([A-Z][a-zA-Z0-9]*)\.(tsx|jsx|ts|js)$/;
  const found = COMPONENT_FILE.exec(filePath);
  if (found === null) return null;
  return found[1];
}
671
/**
 * Two component names are "similar" when they map to the same UI category
 * (button, card, modal, ...).
 */
function areSimilarComponents(name1, name2) {
  return getComponentCategory(name1) === getComponentCategory(name2);
}
676
/**
 * Classify a component name into a coarse UI category by keyword.
 * First matching rule wins; unmatched names fall into "misc".
 */
function getComponentCategory(name) {
  const lower = name.toLowerCase();
  // Ordered keyword -> category table, mirroring the original branch order.
  const rules = [
    [["button", "btn"], "button"],
    [["card"], "card"],
    [["modal", "dialog"], "modal"],
    [["form"], "form"],
    [["input", "field"], "input"],
    [["table", "grid"], "table"],
    [["nav", "menu"], "navigation"],
    [["header", "footer"], "layout"]
  ];
  for (const [keywords, category] of rules) {
    if (keywords.some((k) => lower.includes(k))) return category;
  }
  return "misc";
}
688
/**
 * Collect both sides of every duplicate pair, de-duplicated and sorted.
 */
function getUniqueFiles(duplicates) {
  const unique = new Set(duplicates.flatMap((d) => [d.file1, d.file2]));
  return [...unique].sort();
}
696
/**
 * Return the most severe label from a list. Unknown labels rank below
 * "info" and can never win; an empty list yields "info".
 */
function getHighestSeverity(severities) {
  const RANK = {
    critical: 4,
    major: 3,
    minor: 2,
    info: 1
  };
  let best = "info";
  for (const label of severities) {
    // `?? 0` keeps unrecognized labels from ever beating a known one.
    if ((RANK[label] ?? 0) > (RANK[best] ?? 0)) {
      best = label;
    }
  }
  return best;
}
713
/**
 * Return the pattern type occurring most often among the duplicates.
 * Ties resolve to the type seen first; an empty list yields "unknown".
 */
function getMostCommonPatternType(duplicates) {
  const tally = new Map();
  for (const { patternType } of duplicates) {
    tally.set(patternType, (tally.get(patternType) ?? 0) + 1);
  }
  let winner = "unknown";
  let winnerCount = 0;
  // Map iteration is insertion-ordered, so strict > keeps the first of a tie.
  for (const [type, count] of tally) {
    if (count > winnerCount) {
      winnerCount = count;
      winner = type;
    }
  }
  return winner;
}
728
/**
 * Produce display copy (name, suggestion, reason) for a refactor cluster.
 * Well-known cluster ids get curated copy; anything else gets a generic
 * description built from the pattern type and file count.
 */
function getClusterInfo(clusterId, patternType, fileCount) {
  const curated = new Map([
    [
      "blog-seo-boilerplate",
      {
        name: `Blog SEO Boilerplate (${fileCount} files)`,
        suggestion: "Create BlogPageLayout component with SEO schema generator, breadcrumb component, and metadata helpers",
        reason: "SEO boilerplate duplication increases maintenance burden and schema consistency risk"
      }
    ],
    [
      "e2e-test-patterns",
      {
        name: `E2E Test Patterns (${fileCount} files)`,
        suggestion: "Extract page object helpers and common test utilities (waitFor, fillForm, etc.)",
        reason: "Test helper extraction improves maintainability while preserving test independence"
      }
    ],
    [
      "api-handlers",
      {
        name: `API Handler Patterns (${fileCount} files)`,
        suggestion: "Extract common middleware, error handling, and response formatting",
        reason: "API handler duplication leads to inconsistent error handling and response formats"
      }
    ],
    [
      "validators",
      {
        name: `Validator Patterns (${fileCount} files)`,
        suggestion: "Consolidate into shared schema validators (Zod/Yup) with reusable rules",
        reason: "Validator duplication causes inconsistent validation and harder maintenance"
      }
    ],
    [
      "infrastructure-scripts",
      {
        name: `Infrastructure Scripts (${fileCount} files)`,
        suggestion: "Extract common CLI parsing, file I/O, and error handling utilities",
        reason: "Script duplication is often acceptable for one-off tasks, but common patterns can be shared"
      }
    ],
    [
      "component-button",
      {
        name: `Button Component Variants (${fileCount} files)`,
        suggestion: "Create unified Button component with variant props",
        reason: "Multiple button variants should share base styles and behavior"
      }
    ],
    [
      "component-card",
      {
        name: `Card Component Variants (${fileCount} files)`,
        suggestion: "Create unified Card component with composition pattern",
        reason: "Card variants should share layout structure and styling"
      }
    ],
    [
      "component-modal",
      {
        name: `Modal Component Variants (${fileCount} files)`,
        suggestion: "Create base Modal component with customizable content",
        reason: "Modal variants should share overlay, animation, and accessibility logic"
      }
    ]
  ]);
  const known = curated.get(clusterId);
  if (known) {
    return known;
  }
  return {
    name: `${patternType} Cluster (${fileCount} files)`,
    suggestion: `Extract common ${patternType} patterns into shared utilities`,
    reason: `Multiple similar ${patternType} patterns detected across ${fileCount} files`
  };
}
780
/**
 * Keep only clusters that are impactful either by total token waste or by
 * how many files they span.
 */
function filterClustersByImpact(clusters, minTokenCost = 1e3, minFileCount = 3) {
  const isImpactful = (cluster) =>
    cluster.totalTokenCost >= minTokenCost || cluster.files.length >= minFileCount;
  return clusters.filter(isImpactful);
}
785
-
786
- // src/scoring.ts
787
- import {
788
- calculateMonthlyCost,
789
- calculateProductivityImpact,
790
- DEFAULT_COST_CONFIG
791
- } from "@aiready/core";
792
/**
 * Build the pattern-detect tool score (0-100, higher is better) from the
 * detected duplicates.
 *
 * @param duplicates - duplicate records; each is read for tokenCost,
 *   similarity, and severity
 * @param totalFilesAnalyzed - number of files scanned; 0 short-circuits to a
 *   perfect score with empty factors/recommendations
 * @param costConfig - optional overrides spread over DEFAULT_COST_CONFIG
 * @returns { toolName, score, rawMetrics, factors, recommendations }
 */
function calculatePatternScore(duplicates, totalFilesAnalyzed, costConfig) {
  const totalDuplicates = duplicates.length;
  const totalTokenCost = duplicates.reduce((sum, d) => sum + d.tokenCost, 0);
  // "High impact" = expensive (>1000 tokens) OR very similar (>70%).
  const highImpactDuplicates = duplicates.filter(
    (d) => d.tokenCost > 1e3 || d.similarity > 0.7
  ).length;
  // Nothing analyzed: report a clean 100 with zeroed metrics.
  if (totalFilesAnalyzed === 0) {
    return {
      toolName: "pattern-detect",
      score: 100,
      rawMetrics: {
        totalDuplicates: 0,
        totalTokenCost: 0,
        highImpactDuplicates: 0,
        totalFilesAnalyzed: 0
      },
      factors: [],
      recommendations: []
    };
  }
  // Normalize by repository size: duplicates per 100 files, tokens per file.
  const duplicatesPerFile = totalDuplicates / totalFilesAnalyzed * 100;
  const tokenWastePerFile = totalTokenCost / totalFilesAnalyzed;
  // Penalties are capped (60 + 40 + 15) so the score cannot go below 0
  // before clamping. The divisors/multipliers are tuned weights.
  const duplicatesPenalty = Math.min(60, duplicatesPerFile * 0.6);
  const tokenPenalty = Math.min(40, tokenWastePerFile / 125);
  // NOTE(review): for 1-2 high-impact duplicates `2n - 5` is negative, i.e.
  // a small score BONUS, and the "High-Impact Patterns" factor below then
  // reports a positive impact — confirm this is intended.
  const highImpactPenalty = highImpactDuplicates > 0 ? Math.min(15, highImpactDuplicates * 2 - 5) : -5;
  const score = 100 - duplicatesPenalty - tokenPenalty - highImpactPenalty;
  const finalScore = Math.max(0, Math.min(100, Math.round(score)));
  // Factors explain the score breakdown to the user; impact is negated so
  // penalties display as negative numbers.
  const factors = [
    {
      name: "Duplication Density",
      impact: -Math.round(duplicatesPenalty),
      description: `${duplicatesPerFile.toFixed(1)} duplicates per 100 files`
    },
    {
      name: "Token Waste",
      impact: -Math.round(tokenPenalty),
      description: `${Math.round(tokenWastePerFile)} tokens wasted per file`
    }
  ];
  if (highImpactDuplicates > 0) {
    factors.push({
      name: "High-Impact Patterns",
      impact: -Math.round(highImpactPenalty),
      description: `${highImpactDuplicates} high-impact duplicates (>1000 tokens or >70% similar)`
    });
  } else {
    // No severe duplicates: surface the +5 bonus applied above.
    factors.push({
      name: "No High-Impact Patterns",
      impact: 5,
      description: "No severe duplicates detected"
    });
  }
  // Actionable recommendations, each with an estimated score improvement.
  const recommendations = [];
  if (highImpactDuplicates > 0) {
    const estimatedImpact = Math.min(15, highImpactDuplicates * 3);
    recommendations.push({
      action: `Deduplicate ${highImpactDuplicates} high-impact pattern${highImpactDuplicates > 1 ? "s" : ""}`,
      estimatedImpact,
      priority: "high"
    });
  }
  if (totalDuplicates > 10 && duplicatesPerFile > 20) {
    const estimatedImpact = Math.min(10, Math.round(duplicatesPenalty * 0.3));
    recommendations.push({
      action: "Extract common patterns into shared utilities",
      estimatedImpact,
      priority: "medium"
    });
  }
  if (tokenWastePerFile > 2e3) {
    const estimatedImpact = Math.min(8, Math.round(tokenPenalty * 0.4));
    recommendations.push({
      action: "Consolidate duplicated logic to reduce AI context waste",
      estimatedImpact,
      priority: totalTokenCost > 1e4 ? "high" : "medium"
    });
  }
  // Business-value metrics delegated to @aiready/core helpers.
  const cfg = { ...DEFAULT_COST_CONFIG, ...costConfig };
  const estimatedMonthlyCost = calculateMonthlyCost(totalTokenCost, cfg);
  // Collapse any unrecognized severity label down to "minor" before handing
  // the issue list to the productivity estimator.
  const issues = duplicates.map((d) => ({
    severity: d.severity === "critical" ? "critical" : d.severity === "major" ? "major" : "minor"
  }));
  const productivityImpact = calculateProductivityImpact(issues);
  return {
    toolName: "pattern-detect",
    score: finalScore,
    rawMetrics: {
      totalDuplicates,
      totalTokenCost,
      highImpactDuplicates,
      totalFilesAnalyzed,
      duplicatesPerFile: Math.round(duplicatesPerFile * 10) / 10,
      tokenWastePerFile: Math.round(tokenWastePerFile),
      // Business value metrics
      estimatedMonthlyCost,
      estimatedDeveloperHours: productivityImpact.totalHours
    },
    factors,
    recommendations
  };
}
893
-
894
- // src/index.ts
895
/**
 * Human-readable refactoring advice for a detected duplicate pattern,
 * suffixed with an urgency tag for very similar code.
 *
 * Fix: an unrecognized patternType previously produced the literal string
 * "undefined" plus the urgency suffix; it now falls back to the generic
 * "unknown" advice.
 *
 * @param patternType - detected pattern kind, e.g. "api-handler"
 * @param similarity - 0..1 similarity score between the two blocks
 * @returns suggestion text
 */
function getRefactoringSuggestion(patternType, similarity) {
  const baseMessages = {
    "api-handler": "Extract common middleware or create a base handler class",
    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
    utility: "Move to a shared utilities file and reuse across modules",
    "class-method": "Consider inheritance or composition to share behavior",
    component: "Extract shared logic into a custom hook or HOC",
    function: "Extract into a shared helper function",
    unknown: "Extract common logic into a reusable module"
  };
  // Unknown pattern types get the generic advice instead of "undefined".
  const base = baseMessages[patternType] ?? baseMessages.unknown;
  const urgency = similarity > 0.95 ? " (CRITICAL: Nearly identical code)" : similarity > 0.9 ? " (HIGH: Very similar, refactor soon)" : "";
  return base + urgency;
}
908
/**
 * Derive analysis options sized to the repository.
 *
 * Scans the target directory (via @aiready/core's scanFiles) to estimate the
 * number of code blocks, then scales thresholds so large repositories use
 * stricter, cheaper settings. User-supplied options override the computed
 * defaults key-by-key.
 *
 * @param directory - root directory to analyze
 * @param userOptions - partial options; `useSmartDefaults: false` skips the
 *   scan entirely and returns fixed defaults
 * @returns resolved options object
 */
async function getSmartDefaults(directory, userOptions) {
  // Opt-out path: fixed, conservative defaults without scanning.
  if (userOptions.useSmartDefaults === false) {
    return {
      rootDir: directory,
      minSimilarity: 0.6,
      minLines: 8,
      batchSize: 100,
      approx: true,
      minSharedTokens: 12,
      maxCandidatesPerBlock: 5,
      streamResults: false,
      severity: "all",
      includeTests: false
    };
  }
  const scanOptions = {
    rootDir: directory,
    include: userOptions.include || ["**/*.{ts,tsx,js,jsx,py,java}"],
    exclude: userOptions.exclude
  };
  const { scanFiles: scanFiles2 } = await import("@aiready/core");
  const files = await scanFiles2(scanOptions);
  // Heuristic: assume ~3 analyzable code blocks per file.
  const estimatedBlocks = files.length * 3;
  // Bigger repos -> fewer candidates per block (clamped to [3, 10]).
  const maxCandidatesPerBlock = Math.max(
    3,
    Math.min(10, Math.floor(3e4 / estimatedBlocks))
  );
  // Bigger repos -> stricter similarity (clamped at 0.75) and larger
  // minimum block/token thresholds.
  const minSimilarity = Math.min(0.75, 0.5 + estimatedBlocks / 1e4 * 0.25);
  const minLines = Math.max(
    6,
    Math.min(12, 6 + Math.floor(estimatedBlocks / 2e3))
  );
  const minSharedTokens = Math.max(
    10,
    Math.min(20, 10 + Math.floor(estimatedBlocks / 2e3))
  );
  const batchSize = estimatedBlocks > 1e3 ? 200 : 100;
  const severity = estimatedBlocks > 5e3 ? "high" : "all";
  const defaults = {
    rootDir: directory,
    minSimilarity,
    minLines,
    batchSize,
    approx: true,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults: false,
    severity,
    includeTests: false
  };
  // Overlay user options onto the computed defaults. Only keys present in
  // `defaults` are considered, so unrelated user options are intentionally
  // not copied here. (`value` from the entries is unused; only `key` is.)
  const result = { ...defaults };
  for (const [key, value] of Object.entries(defaults)) {
    if (key in userOptions && userOptions[key] !== void 0) {
      result[key] = userOptions[key];
    }
  }
  return result;
}
966
/**
 * Print the resolved analysis configuration to stdout, unless the caller
 * asked for quiet tool output via `suppressToolConfig`.
 */
function logConfiguration(config, estimatedBlocks) {
  if (config.suppressToolConfig) return;
  const lines = [
    "\u{1F4CB} Configuration:",
    ` Repository size: ~${estimatedBlocks} code blocks`,
    ` Similarity threshold: ${config.minSimilarity}`,
    ` Minimum lines: ${config.minLines}`,
    ` Approximate mode: ${config.approx ? "enabled" : "disabled"}`,
    ` Max candidates per block: ${config.maxCandidatesPerBlock}`,
    ` Min shared tokens: ${config.minSharedTokens}`,
    ` Severity filter: ${config.severity}`,
    ` Include tests: ${config.includeTests}`,
    ""
  ];
  for (const line of lines) {
    console.log(line);
  }
}
979
/**
 * End-to-end pattern analysis entry point.
 *
 * Resolves smart defaults, scans files, detects duplicate patterns, turns
 * them into per-file issue reports, and optionally groups duplicates by
 * file pair and into refactor clusters.
 *
 * @param options - analysis options; anything not destructured below is
 *   passed through to the file scanner
 * @returns { results, duplicates, files, groups, clusters } — `groups` /
 *   `clusters` are undefined when disabled via options
 */
async function analyzePatterns(options) {
  const smartDefaults = await getSmartDefaults(options.rootDir || ".", options);
  // User options win over smart defaults; note that keys explicitly set to
  // undefined in `options` re-shadow the smart default and then fall back to
  // the destructuring defaults below.
  const finalOptions = { ...smartDefaults, ...options };
  const {
    minSimilarity = 0.4,
    minLines = 5,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    streamResults = false,
    severity = "all",
    includeTests = false,
    groupByFilePair = true,
    createClusters = true,
    minClusterTokenCost = 1e3,
    minClusterFiles = 3,
    ...scanOptions
  } = finalOptions;
  const { scanFiles: scanFiles2 } = await import("@aiready/core");
  const files = await scanFiles2(scanOptions);
  // Same ~3-blocks-per-file heuristic used by getSmartDefaults.
  const estimatedBlocks = files.length * 3;
  logConfiguration(finalOptions, estimatedBlocks);
  const results = [];
  // Read all file contents up front (in parallel) for the detector.
  const fileContents = await Promise.all(
    files.map(async (file) => ({
      file,
      content: await readFileContent(file)
    }))
  );
  const duplicates = await detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines,
    batchSize,
    approx,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults,
    onProgress: options.onProgress
  });
  // Build a per-file report; each duplicate contributes an issue to BOTH
  // files it involves.
  for (const file of files) {
    const fileDuplicates = duplicates.filter(
      (dup) => dup.file1 === file || dup.file2 === file
    );
    const issues = fileDuplicates.map((dup) => {
      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
      // Issue severity is derived from similarity, independent of the
      // duplicate record's own severity field.
      const severity2 = dup.similarity > 0.95 ? "critical" : dup.similarity > 0.9 ? "major" : "minor";
      return {
        type: "duplicate-pattern",
        severity: severity2,
        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
        location: {
          file,
          line: dup.file1 === file ? dup.line1 : dup.line2
        },
        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
      };
    });
    // Optional severity filter; unknown filter names fall back to "medium"
    // behavior (critical + major + minor).
    let filteredIssues = issues;
    if (severity !== "all") {
      const severityMap = {
        critical: ["critical"],
        high: ["critical", "major"],
        medium: ["critical", "major", "minor"]
      };
      const allowedSeverities = severityMap[severity] || ["critical", "major", "minor"];
      filteredIssues = issues.filter(
        (issue) => allowedSeverities.includes(issue.severity)
      );
    }
    const totalTokenCost = fileDuplicates.reduce(
      (sum, dup) => sum + dup.tokenCost,
      0
    );
    results.push({
      fileName: file,
      issues: filteredIssues,
      metrics: {
        tokenCost: totalTokenCost,
        // Each duplicate involving the file docks 10% consistency, floored at 0.
        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
      }
    });
  }
  // Optional aggregations over the full duplicate set.
  let groups;
  let clusters;
  if (groupByFilePair) {
    groups = groupDuplicatesByFilePair(duplicates);
  }
  if (createClusters) {
    const allClusters = createRefactorClusters(duplicates);
    clusters = filterClustersByImpact(
      allClusters,
      minClusterTokenCost,
      minClusterFiles
    );
  }
  return { results, duplicates, files, groups, clusters };
}
1077
/**
 * Build an aggregate summary from per-file analysis results.
 *
 * Issue messages encode the pattern metadata as text, so the pattern type,
 * similarity, token cost, and partner file are parsed back out with regexes.
 */
function generateSummary(results) {
  const allIssues = results.flatMap((r) => r.issues);

  let totalTokenCost = 0;
  for (const { metrics } of results) {
    totalTokenCost += metrics.tokenCost || 0;
  }

  // Seed every known pattern type with zero so consumers always see all keys.
  const patternsByType = {
    "api-handler": 0,
    validator: 0,
    utility: 0,
    "class-method": 0,
    component: 0,
    function: 0,
    unknown: 0
  };
  for (const issue of allIssues) {
    const typed = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    if (typed) {
      const type = typed[1];
      patternsByType[type] = (patternsByType[type] || 0) + 1;
    }
  }

  const topDuplicates = allIssues.slice(0, 10).map((issue) => {
    const similarityMatch = issue.message.match(/(\d+)% similar/);
    const tokenMatch = issue.message.match(/\((\d+) tokens/);
    const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    const fileMatch = issue.message.match(/similar to (.+?) \(/);
    // Line positions for the partner file are not recoverable from an Issue
    // record, so those are reported as 0.
    return {
      files: [
        {
          path: issue.location.file,
          startLine: issue.location.line,
          endLine: 0
        },
        {
          path: fileMatch?.[1] || "unknown",
          startLine: 0,
          endLine: 0
        }
      ],
      similarity: similarityMatch ? parseInt(similarityMatch[1], 10) / 100 : 0,
      patternType: typeMatch?.[1] || "unknown",
      tokenCost: tokenMatch ? parseInt(tokenMatch[1], 10) : 0
    };
  });

  return {
    totalPatterns: allIssues.length,
    totalTokenCost,
    patternsByType,
    topDuplicates
  };
}
1132
-
1133
// Public API of this bundled chunk (names defined earlier in the bundle).
export {
  calculateSeverity,
  getSeverityLabel,
  filterBySeverity,
  detectDuplicatePatterns,
  calculatePatternScore,
  getSmartDefaults,
  analyzePatterns,
  generateSummary
};