@aiready/pattern-detect 0.16.19 → 0.16.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dist/analyzer-entry/index.d.mts +3 -0
  2. package/dist/analyzer-entry/index.d.ts +3 -0
  3. package/dist/analyzer-entry/index.js +693 -0
  4. package/dist/analyzer-entry/index.mjs +12 -0
  5. package/dist/analyzer-entry.d.mts +100 -3
  6. package/dist/analyzer-entry.d.ts +100 -3
  7. package/dist/analyzer-entry.js +9 -126
  8. package/dist/analyzer-entry.mjs +2 -2
  9. package/dist/chunk-65UQ5J2J.mjs +64 -0
  10. package/dist/chunk-6JTVOBJX.mjs +64 -0
  11. package/dist/chunk-BKRPSTT2.mjs +64 -0
  12. package/dist/chunk-CMWW24HW.mjs +259 -0
  13. package/dist/chunk-DNZS4ESD.mjs +391 -0
  14. package/dist/chunk-GLKAGFKX.mjs +391 -0
  15. package/dist/chunk-GREN7X5H.mjs +143 -0
  16. package/dist/chunk-JBUZ6YHE.mjs +391 -0
  17. package/dist/chunk-KWMNN3TG.mjs +391 -0
  18. package/dist/chunk-LYKRYBSM.mjs +64 -0
  19. package/dist/chunk-MHU3CL4R.mjs +64 -0
  20. package/dist/chunk-RS73WLNI.mjs +251 -0
  21. package/dist/chunk-SVCSIZ2A.mjs +259 -0
  22. package/dist/chunk-VGMM3L3O.mjs +143 -0
  23. package/dist/chunk-XNPID6FU.mjs +391 -0
  24. package/dist/cli.js +29 -147
  25. package/dist/cli.mjs +27 -25
  26. package/dist/context-rules-entry/index.d.mts +2 -0
  27. package/dist/context-rules-entry/index.d.ts +2 -0
  28. package/dist/context-rules-entry/index.js +207 -0
  29. package/dist/context-rules-entry/index.mjs +12 -0
  30. package/dist/context-rules-entry.d.mts +55 -2
  31. package/dist/context-rules-entry.d.ts +55 -2
  32. package/dist/detector-entry/index.d.mts +14 -0
  33. package/dist/detector-entry/index.d.ts +14 -0
  34. package/dist/detector-entry/index.js +301 -0
  35. package/dist/detector-entry/index.mjs +7 -0
  36. package/dist/detector-entry.d.mts +2 -2
  37. package/dist/detector-entry.d.ts +2 -2
  38. package/dist/detector-entry.js +9 -126
  39. package/dist/detector-entry.mjs +1 -1
  40. package/dist/index-BVz-HnZd.d.mts +119 -0
  41. package/dist/index-BwuoiCNm.d.ts +119 -0
  42. package/dist/index-y2uJSngh.d.mts +60 -0
  43. package/dist/index-y2uJSngh.d.ts +60 -0
  44. package/dist/index.d.mts +4 -4
  45. package/dist/index.d.ts +4 -4
  46. package/dist/index.js +9 -126
  47. package/dist/index.mjs +3 -3
  48. package/dist/scoring-entry/index.d.mts +23 -0
  49. package/dist/scoring-entry/index.d.ts +23 -0
  50. package/dist/scoring-entry/index.js +133 -0
  51. package/dist/scoring-entry/index.mjs +6 -0
  52. package/dist/scoring-entry.d.mts +1 -1
  53. package/dist/scoring-entry.d.ts +1 -1
  54. package/dist/types-C4lmb2Yh.d.mts +36 -0
  55. package/dist/types-C4lmb2Yh.d.ts +36 -0
  56. package/package.json +16 -16
@@ -0,0 +1,391 @@
1
+ import {
2
+ detectDuplicatePatterns
3
+ } from "./chunk-RS73WLNI.mjs";
4
+ import {
5
+ calculateSeverity
6
+ } from "./chunk-I6ETJC7L.mjs";
7
+
8
+ // src/grouping.ts
9
+ import { getSeverityLevel } from "@aiready/core";
10
+ import path from "path";
11
/**
 * Groups duplicate findings by the unordered pair of files they span.
 *
 * Each group aggregates occurrence count, total token cost, the set of
 * pattern types seen, every line range involved, and the worst severity
 * observed across the pair (compared via getSeverityLevel).
 *
 * @param {Array} duplicates - Duplicate records with file1/file2, lines,
 *   similarity, tokenCost, patternType and severity fields.
 * @returns {Array} One aggregate entry per file pair, with
 *   averageSimilarity computed over the pair's occurrences.
 */
function groupDuplicatesByFilePair(duplicates) {
  const byPair = new Map();
  for (const duplicate of duplicates) {
    // Sort so (a, b) and (b, a) land in the same bucket.
    const pairKey = [duplicate.file1, duplicate.file2].sort().join("::");
    let entry = byPair.get(pairKey);
    if (entry === undefined) {
      entry = {
        filePair: pairKey,
        severity: duplicate.severity,
        occurrences: 0,
        totalTokenCost: 0,
        averageSimilarity: 0,
        patternTypes: new Set(),
        lineRanges: []
      };
      byPair.set(pairKey, entry);
    }
    entry.occurrences += 1;
    entry.totalTokenCost += duplicate.tokenCost;
    // Running sum; divided by occurrences in the final mapping below.
    entry.averageSimilarity += duplicate.similarity;
    entry.patternTypes.add(duplicate.patternType);
    entry.lineRanges.push({
      file1: { start: duplicate.line1, end: duplicate.endLine1 },
      file2: { start: duplicate.line2, end: duplicate.endLine2 }
    });
    // Keep the most severe rating seen for this pair.
    if (getSeverityLevel(duplicate.severity) > getSeverityLevel(entry.severity)) {
      entry.severity = duplicate.severity;
    }
  }
  return [...byPair.values()].map((entry) => ({
    ...entry,
    averageSimilarity: entry.averageSimilarity / entry.occurrences
  }));
}
46
/**
 * Builds refactor clusters from duplicate findings.
 *
 * Treats files as nodes of an undirected graph where each duplicate adds an
 * edge between its two files, finds connected components via BFS, and turns
 * every component with at least two files into a cluster annotated with
 * aggregate cost, average similarity and a severity assessment from
 * calculateSeverity.
 *
 * @param {Array} duplicates - Duplicate records with file1/file2,
 *   similarity and tokenCost fields.
 * @returns {Array} Cluster descriptors (id, name, files, severity,
 *   duplicateCount, totalTokenCost, averageSimilarity, reason, suggestion).
 */
function createRefactorClusters(duplicates) {
  // Build the adjacency map: file -> set of files it shares duplicates with.
  const adjacency = new Map();
  for (const dup of duplicates) {
    if (!adjacency.has(dup.file1)) adjacency.set(dup.file1, new Set());
    if (!adjacency.has(dup.file2)) adjacency.set(dup.file2, new Set());
    adjacency.get(dup.file1).add(dup.file2);
    adjacency.get(dup.file2).add(dup.file1);
  }
  // BFS to collect connected components.
  const visited = new Set();
  const components = [];
  for (const file of adjacency.keys()) {
    if (visited.has(file)) continue;
    const component = [];
    const queue = [file];
    visited.add(file);
    while (queue.length > 0) {
      const curr = queue.shift();
      component.push(curr);
      for (const neighbor of adjacency.get(curr) || []) {
        if (!visited.has(neighbor)) {
          visited.add(neighbor);
          queue.push(neighbor);
        }
      }
    }
    components.push(component);
  }
  const clusters = [];
  for (const component of components) {
    // A single-file component (self-duplicate only) is not a cluster.
    if (component.length < 2) continue;
    // Set membership keeps this pass O(files + duplicates) instead of the
    // O(files * duplicates) that Array#includes inside the filter would cost.
    const memberSet = new Set(component);
    const componentDups = duplicates.filter(
      (d) => memberSet.has(d.file1) && memberSet.has(d.file2)
    );
    const totalTokenCost = componentDups.reduce(
      (sum, d) => sum + d.tokenCost,
      0
    );
    // Math.max(1, ...) guards the division when no duplicate falls fully
    // inside the component.
    const avgSimilarity = componentDups.reduce((sum, d) => sum + d.similarity, 0) / Math.max(1, componentDups.length);
    const name = determineClusterName(component);
    const { severity, reason, suggestion } = calculateSeverity(
      component[0],
      component[1],
      "",
      // Code not available here
      avgSimilarity,
      30
      // Assume substantial if clustered
    );
    clusters.push({
      id: `cluster-${clusters.length}`,
      name,
      files: component,
      severity,
      duplicateCount: componentDups.length,
      totalTokenCost,
      averageSimilarity: avgSimilarity,
      reason,
      suggestion
    });
  }
  return clusters;
}
108
/**
 * Picks a human-readable name for a cluster of related files.
 *
 * Known path substrings map to curated labels; otherwise the name is
 * derived from the parent directory of the first file, falling back to a
 * generic label.
 *
 * @param {string[]} files - File paths belonging to the cluster.
 * @returns {string} Display name for the cluster.
 */
function determineClusterName(files) {
  if (files.length === 0) return "Unknown Cluster";
  // Checked in order; the first matching substring wins.
  const curatedLabels = [
    ["blog", "Blog SEO Boilerplate"],
    ["buttons", "Button Component Variants"],
    ["cards", "Card Component Variants"],
    ["login.test", "E2E Test Patterns"]
  ];
  for (const [needle, label] of curatedLabels) {
    if (files.some((file) => file.includes(needle))) {
      return label;
    }
  }
  // Fall back to "<ParentDir> Domain Group" based on the first file's path.
  const parentDir = path.dirname(files[0]).split(path.sep).pop();
  if (parentDir && parentDir !== "." && parentDir !== "..") {
    return `${parentDir.charAt(0).toUpperCase() + parentDir.slice(1)} Domain Group`;
  }
  return "Shared Pattern Group";
}
122
/**
 * Keeps only clusters worth acting on: at least `minTokenCost` total wasted
 * tokens and at least `minFiles` participating files.
 *
 * @param {Array} clusters - Cluster descriptors from createRefactorClusters.
 * @param {number} [minTokenCost=1000] - Minimum totalTokenCost to keep.
 * @param {number} [minFiles=3] - Minimum number of files to keep.
 * @returns {Array} The filtered cluster list (original order preserved).
 */
function filterClustersByImpact(clusters, minTokenCost = 1e3, minFiles = 3) {
  const meetsImpactBar = (cluster) =>
    cluster.totalTokenCost >= minTokenCost && cluster.files.length >= minFiles;
  return clusters.filter(meetsImpactBar);
}
127
+
128
+ // src/analyzer.ts
129
+ import { scanFiles, readFileContent, Severity as Severity2, IssueType } from "@aiready/core";
130
/**
 * Returns a refactoring suggestion for a duplicate pattern, appending an
 * urgency tag when the similarity is very high.
 *
 * @param {string} patternType - One of the known pattern types
 *   ("api-handler", "validator", "utility", "class-method", "component",
 *   "function", "unknown"); unrecognized values fall back to the generic
 *   "unknown" message.
 * @param {number} similarity - Similarity in [0, 1]; > 0.95 tags CRITICAL,
 *   > 0.9 tags HIGH.
 * @returns {string} Human-readable suggestion text.
 */
function getRefactoringSuggestion(patternType, similarity) {
  const baseMessages = {
    "api-handler": "Extract common middleware or create a base handler class",
    validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
    utility: "Move to a shared utilities file and reuse across modules",
    "class-method": "Consider inheritance or composition to share behavior",
    component: "Extract shared logic into a custom hook or HOC",
    function: "Extract into a shared helper function",
    unknown: "Extract common logic into a reusable module"
  };
  // Fall back to the generic message so an unrecognized pattern type never
  // produces the literal string "undefined..." in user-facing output.
  const base = baseMessages[patternType] ?? baseMessages.unknown;
  const urgency = similarity > 0.95 ? " (CRITICAL: Nearly identical code)" : similarity > 0.9 ? " (HIGH: Very similar, refactor soon)" : "";
  return base + urgency;
}
143
/**
 * Computes analysis options sized to the repository.
 *
 * With `useSmartDefaults: false` a fixed baseline is returned immediately.
 * Otherwise the directory is scanned and thresholds (minLines,
 * minSimilarity, batchSize, severity, maxCandidatesPerBlock) are scaled
 * from an estimate of ~5 code blocks per file. Any option the caller set
 * explicitly (and not undefined) overrides the computed value.
 *
 * @param {string} directory - Root directory to analyze.
 * @param {object} userOptions - Caller-supplied option overrides.
 * @returns {Promise<object>} The merged option set.
 */
async function getSmartDefaults(directory, userOptions) {
  if (userOptions.useSmartDefaults === false) {
    // Smart sizing explicitly disabled: hand back the fixed baseline.
    return {
      rootDir: directory,
      minSimilarity: 0.6,
      minLines: 8,
      batchSize: 100,
      approx: true,
      minSharedTokens: 12,
      maxCandidatesPerBlock: 5,
      streamResults: false,
      severity: "all",
      includeTests: false
    };
  }
  const files = await scanFiles({
    rootDir: directory,
    include: userOptions.include || ["**/*.{ts,tsx,js,jsx,py,java}"],
    exclude: userOptions.exclude
  });
  // Rough size estimate: ~5 code blocks per scanned file.
  const estimatedBlocks = files.length * 5;
  // Larger repos get stricter thresholds to keep runtime bounded.
  const minLines = Math.max(
    6,
    Math.min(20, 6 + Math.floor(estimatedBlocks / 1e3) * 2)
  );
  const minSimilarity = Math.min(0.85, 0.5 + estimatedBlocks / 5e3 * 0.3);
  const batchSize = estimatedBlocks > 1e3 ? 200 : 100;
  const severity = estimatedBlocks > 3e3 ? "high" : "all";
  const maxCandidatesPerBlock = Math.max(
    5,
    Math.min(100, Math.floor(1e6 / estimatedBlocks))
  );
  const computed = {
    rootDir: directory,
    minSimilarity,
    minLines,
    batchSize,
    approx: true,
    minSharedTokens: 10,
    maxCandidatesPerBlock,
    streamResults: false,
    severity,
    includeTests: false
  };
  // Explicit caller settings win over computed values; undefined is ignored.
  const merged = { ...computed };
  for (const key of Object.keys(computed)) {
    if (key in userOptions && userOptions[key] !== undefined) {
      merged[key] = userOptions[key];
    }
  }
  return merged;
}
197
/**
 * Prints the effective analysis configuration to the console, one line per
 * setting. Optional settings (exclude patterns, confidence threshold,
 * ignore whitelist) are only shown when present. No-op when
 * `config.suppressToolConfig` is set.
 *
 * @param {object} config - Effective analysis options.
 * @param {number} estimatedBlocks - Estimated number of code blocks.
 */
function logConfiguration(config, estimatedBlocks) {
  if (config.suppressToolConfig) return;
  const report = [
    "\u{1F4CB} Configuration:",
    ` Repository size: ~${estimatedBlocks} code blocks`,
    ` Similarity threshold: ${config.minSimilarity}`,
    ` Minimum lines: ${config.minLines}`,
    ` Approximate mode: ${config.approx ? "enabled" : "disabled"}`,
    ` Max candidates per block: ${config.maxCandidatesPerBlock}`,
    ` Min shared tokens: ${config.minSharedTokens}`,
    ` Severity filter: ${config.severity}`,
    ` Include tests: ${config.includeTests}`
  ];
  if (config.excludePatterns && config.excludePatterns.length > 0) {
    report.push(` Exclude patterns: ${config.excludePatterns.length} active`);
  }
  if (config.confidenceThreshold && config.confidenceThreshold > 0) {
    report.push(` Confidence threshold: ${config.confidenceThreshold}`);
  }
  if (config.ignoreWhitelist && config.ignoreWhitelist.length > 0) {
    report.push(` Ignore whitelist: ${config.ignoreWhitelist.length} entries`);
  }
  report.push("");
  for (const line of report) {
    console.log(line);
  }
}
221
/**
 * Main analysis entry point: scans files, detects duplicate patterns, and
 * builds per-file issue reports plus optional pair groups and refactor
 * clusters.
 *
 * @param {object} options - Caller options; merged over smart defaults, so
 *   any value the caller sets explicitly wins.
 * @returns {Promise<object>} { results, duplicates, files, groups,
 *   clusters, config } — `groups`/`clusters` are undefined when the
 *   corresponding feature flag is off.
 */
async function analyzePatterns(options) {
  // Smart defaults are computed first, then caller options override them.
  const smartDefaults = await getSmartDefaults(options.rootDir || ".", options);
  const finalOptions = { ...smartDefaults, ...options };
  // Destructure the analysis knobs; everything left in ...scanOptions is
  // forwarded to scanFiles unchanged.
  const {
    minSimilarity = 0.4,
    minLines = 5,
    batchSize = 100,
    approx = true,
    minSharedTokens = 8,
    maxCandidatesPerBlock = 100,
    streamResults = false,
    severity = "all",
    groupByFilePair = true,
    createClusters = true,
    minClusterTokenCost = 1e3,
    minClusterFiles = 3,
    excludePatterns = [],
    confidenceThreshold = 0,
    ignoreWhitelist = [],
    ...scanOptions
  } = finalOptions;
  const files = await scanFiles(scanOptions);
  // NOTE(review): uses * 3 here while getSmartDefaults estimates * 5 blocks
  // per file — presumably intentional, but worth confirming.
  const estimatedBlocks = files.length * 3;
  logConfiguration(finalOptions, estimatedBlocks);
  const results = [];
  // Read file contents in batches of 50 to bound concurrent file handles.
  const READ_BATCH_SIZE = 50;
  const fileContents = [];
  for (let i = 0; i < files.length; i += READ_BATCH_SIZE) {
    const batch = files.slice(i, i + READ_BATCH_SIZE);
    const batchContents = await Promise.all(
      batch.map(async (file) => ({
        file,
        content: await readFileContent(file)
      }))
    );
    fileContents.push(...batchContents);
  }
  const duplicates = await detectDuplicatePatterns(fileContents, {
    minSimilarity,
    minLines,
    batchSize,
    approx,
    minSharedTokens,
    maxCandidatesPerBlock,
    streamResults,
    excludePatterns,
    confidenceThreshold,
    ignoreWhitelist,
    onProgress: options.onProgress
  });
  // Build one result entry per scanned file, even if it has no duplicates.
  for (const file of files) {
    const fileDuplicates = duplicates.filter(
      (dup) => dup.file1 === file || dup.file2 === file
    );
    const issues = fileDuplicates.map((dup) => {
      const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
      // Similarity > 0.95 => Critical, > 0.9 => Major, otherwise Minor.
      const severity2 = dup.similarity > 0.95 ? Severity2.Critical : dup.similarity > 0.9 ? Severity2.Major : Severity2.Minor;
      return {
        type: IssueType.DuplicatePattern,
        severity: severity2,
        // generateSummary re-parses this exact message format with regexes;
        // keep the wording in sync if it ever changes.
        message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
        location: {
          file,
          line: dup.file1 === file ? dup.line1 : dup.line2
        },
        suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
      };
    });
    // Severity filtering: "critical"/"high"/"medium" narrow the issue list;
    // any other value (including "all") keeps everything.
    let filteredIssues = issues;
    if (severity !== "all") {
      const severityMap = {
        critical: [Severity2.Critical],
        high: [Severity2.Critical, Severity2.Major],
        medium: [Severity2.Critical, Severity2.Major, Severity2.Minor]
      };
      const allowedSeverities = severityMap[severity] || [Severity2.Critical, Severity2.Major, Severity2.Minor];
      filteredIssues = issues.filter(
        (issue) => allowedSeverities.includes(issue.severity)
      );
    }
    const totalTokenCost = fileDuplicates.reduce(
      (sum, dup) => sum + dup.tokenCost,
      0
    );
    results.push({
      fileName: file,
      issues: filteredIssues,
      metrics: {
        tokenCost: totalTokenCost,
        // Each duplicate docks 10% consistency, floored at 0.
        consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
      }
    });
  }
  let groups;
  let clusters;
  if (groupByFilePair) {
    groups = groupDuplicatesByFilePair(duplicates);
  }
  if (createClusters) {
    const allClusters = createRefactorClusters(duplicates);
    clusters = filterClustersByImpact(
      allClusters,
      minClusterTokenCost,
      minClusterFiles
    );
  }
  return { results, duplicates, files, groups, clusters, config: finalOptions };
}
329
/**
 * Builds an aggregate summary from per-file analysis results.
 *
 * Pattern type, similarity, token cost and the partner file are recovered
 * by re-parsing each issue's message string (the format produced by
 * analyzePatterns), so the regexes here must stay in sync with that format.
 *
 * @param {Array} results - Entries with `issues` and `metrics.tokenCost`.
 * @returns {object} { totalPatterns, totalTokenCost, patternsByType,
 *   topDuplicates } where topDuplicates holds at most the first 10 issues.
 */
function generateSummary(results) {
  const allIssues = results.flatMap((r) => r.issues);
  let totalTokenCost = 0;
  for (const result of results) {
    totalTokenCost += result.metrics.tokenCost || 0;
  }
  const patternsByType = {
    "api-handler": 0,
    validator: 0,
    utility: 0,
    "class-method": 0,
    component: 0,
    function: 0,
    unknown: 0
  };
  for (const issue of allIssues) {
    // Messages start with "<pattern-type> pattern ...".
    const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    if (typeMatch) {
      const type = typeMatch[1];
      patternsByType[type] = (patternsByType[type] || 0) + 1;
    }
  }
  const topDuplicates = allIssues.slice(0, 10).map((issue) => {
    const similarityMatch = issue.message.match(/(\d+)% similar/);
    const tokenMatch = issue.message.match(/\((\d+) tokens/);
    const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
    const fileMatch = issue.message.match(/similar to (.+?) \(/);
    const similarity = similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0;
    return {
      files: [
        {
          path: issue.location.file,
          startLine: issue.location.line,
          endLine: 0
        },
        {
          // Partner file's line info is not recoverable from the message.
          path: fileMatch?.[1] || "unknown",
          startLine: 0,
          endLine: 0
        }
      ],
      similarity,
      // Fallback for summary: confidence mirrors similarity here.
      confidence: similarity,
      patternType: typeMatch?.[1] || "unknown",
      tokenCost: tokenMatch ? parseInt(tokenMatch[1]) : 0
    };
  });
  return {
    totalPatterns: allIssues.length,
    totalTokenCost,
    patternsByType,
    topDuplicates
  };
}
383
+
384
+ export {
385
+ groupDuplicatesByFilePair,
386
+ createRefactorClusters,
387
+ filterClustersByImpact,
388
+ getSmartDefaults,
389
+ analyzePatterns,
390
+ generateSummary
391
+ };