@aiready/pattern-detect 0.16.19 → 0.16.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dist/analyzer-entry/index.d.mts +3 -0
  2. package/dist/analyzer-entry/index.d.ts +3 -0
  3. package/dist/analyzer-entry/index.js +693 -0
  4. package/dist/analyzer-entry/index.mjs +12 -0
  5. package/dist/analyzer-entry.d.mts +100 -3
  6. package/dist/analyzer-entry.d.ts +100 -3
  7. package/dist/analyzer-entry.js +9 -126
  8. package/dist/analyzer-entry.mjs +2 -2
  9. package/dist/chunk-65UQ5J2J.mjs +64 -0
  10. package/dist/chunk-6JTVOBJX.mjs +64 -0
  11. package/dist/chunk-BKRPSTT2.mjs +64 -0
  12. package/dist/chunk-CMWW24HW.mjs +259 -0
  13. package/dist/chunk-DNZS4ESD.mjs +391 -0
  14. package/dist/chunk-GLKAGFKX.mjs +391 -0
  15. package/dist/chunk-GREN7X5H.mjs +143 -0
  16. package/dist/chunk-JBUZ6YHE.mjs +391 -0
  17. package/dist/chunk-KWMNN3TG.mjs +391 -0
  18. package/dist/chunk-LYKRYBSM.mjs +64 -0
  19. package/dist/chunk-MHU3CL4R.mjs +64 -0
  20. package/dist/chunk-RS73WLNI.mjs +251 -0
  21. package/dist/chunk-SVCSIZ2A.mjs +259 -0
  22. package/dist/chunk-VGMM3L3O.mjs +143 -0
  23. package/dist/chunk-XNPID6FU.mjs +391 -0
  24. package/dist/cli.js +29 -147
  25. package/dist/cli.mjs +27 -25
  26. package/dist/context-rules-entry/index.d.mts +2 -0
  27. package/dist/context-rules-entry/index.d.ts +2 -0
  28. package/dist/context-rules-entry/index.js +207 -0
  29. package/dist/context-rules-entry/index.mjs +12 -0
  30. package/dist/context-rules-entry.d.mts +55 -2
  31. package/dist/context-rules-entry.d.ts +55 -2
  32. package/dist/detector-entry/index.d.mts +14 -0
  33. package/dist/detector-entry/index.d.ts +14 -0
  34. package/dist/detector-entry/index.js +301 -0
  35. package/dist/detector-entry/index.mjs +7 -0
  36. package/dist/detector-entry.d.mts +2 -2
  37. package/dist/detector-entry.d.ts +2 -2
  38. package/dist/detector-entry.js +9 -126
  39. package/dist/detector-entry.mjs +1 -1
  40. package/dist/index-BVz-HnZd.d.mts +119 -0
  41. package/dist/index-BwuoiCNm.d.ts +119 -0
  42. package/dist/index-y2uJSngh.d.mts +60 -0
  43. package/dist/index-y2uJSngh.d.ts +60 -0
  44. package/dist/index.d.mts +4 -4
  45. package/dist/index.d.ts +4 -4
  46. package/dist/index.js +9 -126
  47. package/dist/index.mjs +3 -3
  48. package/dist/scoring-entry/index.d.mts +23 -0
  49. package/dist/scoring-entry/index.d.ts +23 -0
  50. package/dist/scoring-entry/index.js +133 -0
  51. package/dist/scoring-entry/index.mjs +6 -0
  52. package/dist/scoring-entry.d.mts +1 -1
  53. package/dist/scoring-entry.d.ts +1 -1
  54. package/dist/types-C4lmb2Yh.d.mts +36 -0
  55. package/dist/types-C4lmb2Yh.d.ts +36 -0
  56. package/package.json +16 -16
@@ -0,0 +1,391 @@
1
+ import {
2
+ detectDuplicatePatterns
3
+ } from "./chunk-CMWW24HW.mjs";
4
+ import {
5
+ calculateSeverity
6
+ } from "./chunk-I6ETJC7L.mjs";
7
+
8
+ // src/grouping.ts
9
+ import { getSeverityLevel } from "@aiready/core";
10
+ import path from "path";
11
+ function groupDuplicatesByFilePair(duplicates) {
12
+ const groups = /* @__PURE__ */ new Map();
13
+ for (const dup of duplicates) {
14
+ const files = [dup.file1, dup.file2].sort();
15
+ const key = files.join("::");
16
+ if (!groups.has(key)) {
17
+ groups.set(key, {
18
+ filePair: key,
19
+ severity: dup.severity,
20
+ occurrences: 0,
21
+ totalTokenCost: 0,
22
+ averageSimilarity: 0,
23
+ patternTypes: /* @__PURE__ */ new Set(),
24
+ lineRanges: []
25
+ });
26
+ }
27
+ const group = groups.get(key);
28
+ group.occurrences++;
29
+ group.totalTokenCost += dup.tokenCost;
30
+ group.averageSimilarity += dup.similarity;
31
+ group.patternTypes.add(dup.patternType);
32
+ group.lineRanges.push({
33
+ file1: { start: dup.line1, end: dup.endLine1 },
34
+ file2: { start: dup.line2, end: dup.endLine2 }
35
+ });
36
+ const currentSev = dup.severity;
37
+ if (getSeverityLevel(currentSev) > getSeverityLevel(group.severity)) {
38
+ group.severity = currentSev;
39
+ }
40
+ }
41
+ return Array.from(groups.values()).map((g) => ({
42
+ ...g,
43
+ averageSimilarity: g.averageSimilarity / g.occurrences
44
+ }));
45
+ }
46
+ function createRefactorClusters(duplicates) {
47
+ const adjacency = /* @__PURE__ */ new Map();
48
+ const visited = /* @__PURE__ */ new Set();
49
+ const components = [];
50
+ for (const dup of duplicates) {
51
+ if (!adjacency.has(dup.file1)) adjacency.set(dup.file1, /* @__PURE__ */ new Set());
52
+ if (!adjacency.has(dup.file2)) adjacency.set(dup.file2, /* @__PURE__ */ new Set());
53
+ adjacency.get(dup.file1).add(dup.file2);
54
+ adjacency.get(dup.file2).add(dup.file1);
55
+ }
56
+ for (const file of adjacency.keys()) {
57
+ if (visited.has(file)) continue;
58
+ const component = [];
59
+ const queue = [file];
60
+ visited.add(file);
61
+ while (queue.length > 0) {
62
+ const curr = queue.shift();
63
+ component.push(curr);
64
+ for (const neighbor of adjacency.get(curr) || []) {
65
+ if (!visited.has(neighbor)) {
66
+ visited.add(neighbor);
67
+ queue.push(neighbor);
68
+ }
69
+ }
70
+ }
71
+ components.push(component);
72
+ }
73
+ const clusters = [];
74
+ for (const component of components) {
75
+ if (component.length < 2) continue;
76
+ const componentDups = duplicates.filter(
77
+ (d) => component.includes(d.file1) && component.includes(d.file2)
78
+ );
79
+ const totalTokenCost = componentDups.reduce(
80
+ (sum, d) => sum + d.tokenCost,
81
+ 0
82
+ );
83
+ const avgSimilarity = componentDups.reduce((sum, d) => sum + d.similarity, 0) / Math.max(1, componentDups.length);
84
+ const name = determineClusterName(component);
85
+ const { severity, reason, suggestion } = calculateSeverity(
86
+ component[0],
87
+ component[1],
88
+ "",
89
+ // Code not available here
90
+ avgSimilarity,
91
+ 30
92
+ // Assume substantial if clustered
93
+ );
94
+ clusters.push({
95
+ id: `cluster-${clusters.length}`,
96
+ name,
97
+ files: component,
98
+ severity,
99
+ duplicateCount: componentDups.length,
100
+ totalTokenCost,
101
+ averageSimilarity: avgSimilarity,
102
+ reason,
103
+ suggestion
104
+ });
105
+ }
106
+ return clusters;
107
+ }
108
+ function determineClusterName(files) {
109
+ if (files.length === 0) return "Unknown Cluster";
110
+ if (files.some((f) => f.includes("blog"))) return "Blog SEO Boilerplate";
111
+ if (files.some((f) => f.includes("buttons")))
112
+ return "Button Component Variants";
113
+ if (files.some((f) => f.includes("cards"))) return "Card Component Variants";
114
+ if (files.some((f) => f.includes("login.test"))) return "E2E Test Patterns";
115
+ const first = files[0];
116
+ const dirName = path.dirname(first).split(path.sep).pop();
117
+ if (dirName && dirName !== "." && dirName !== "..") {
118
+ return `${dirName.charAt(0).toUpperCase() + dirName.slice(1)} Domain Group`;
119
+ }
120
+ return "Shared Pattern Group";
121
+ }
122
+ function filterClustersByImpact(clusters, minTokenCost = 1e3, minFiles = 3) {
123
+ return clusters.filter(
124
+ (c) => c.totalTokenCost >= minTokenCost && c.files.length >= minFiles
125
+ );
126
+ }
127
+
128
+ // src/analyzer.ts
129
+ import { scanFiles, readFileContent, Severity as Severity2, IssueType } from "@aiready/core";
130
+ function getRefactoringSuggestion(patternType, similarity) {
131
+ const baseMessages = {
132
+ "api-handler": "Extract common middleware or create a base handler class",
133
+ validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
134
+ utility: "Move to a shared utilities file and reuse across modules",
135
+ "class-method": "Consider inheritance or composition to share behavior",
136
+ component: "Extract shared logic into a custom hook or HOC",
137
+ function: "Extract into a shared helper function",
138
+ unknown: "Extract common logic into a reusable module"
139
+ };
140
+ const urgency = similarity > 0.95 ? " (CRITICAL: Nearly identical code)" : similarity > 0.9 ? " (HIGH: Very similar, refactor soon)" : "";
141
+ return baseMessages[patternType] + urgency;
142
+ }
143
+ async function getSmartDefaults(directory, userOptions) {
144
+ if (userOptions.useSmartDefaults === false) {
145
+ return {
146
+ rootDir: directory,
147
+ minSimilarity: 0.6,
148
+ minLines: 8,
149
+ batchSize: 100,
150
+ approx: true,
151
+ minSharedTokens: 12,
152
+ maxCandidatesPerBlock: 5,
153
+ streamResults: false,
154
+ severity: "all",
155
+ includeTests: false
156
+ };
157
+ }
158
+ const scanOptions = {
159
+ rootDir: directory,
160
+ include: userOptions.include || ["**/*.{ts,tsx,js,jsx,py,java}"],
161
+ exclude: userOptions.exclude
162
+ };
163
+ const files = await scanFiles(scanOptions);
164
+ const fileCount = files.length;
165
+ const estimatedBlocks = fileCount * 5;
166
+ const minLines = Math.max(
167
+ 6,
168
+ Math.min(20, 6 + Math.floor(estimatedBlocks / 1e3) * 2)
169
+ );
170
+ const minSimilarity = Math.min(0.85, 0.5 + estimatedBlocks / 5e3 * 0.3);
171
+ const batchSize = estimatedBlocks > 1e3 ? 200 : 100;
172
+ const severity = estimatedBlocks > 3e3 ? "high" : "all";
173
+ const maxCandidatesPerBlock = Math.max(
174
+ 5,
175
+ Math.min(100, Math.floor(1e6 / estimatedBlocks))
176
+ );
177
+ const defaults = {
178
+ rootDir: directory,
179
+ minSimilarity,
180
+ minLines,
181
+ batchSize,
182
+ approx: true,
183
+ minSharedTokens: 10,
184
+ maxCandidatesPerBlock,
185
+ streamResults: false,
186
+ severity,
187
+ includeTests: false
188
+ };
189
+ const result = { ...defaults };
190
+ for (const key of Object.keys(defaults)) {
191
+ if (key in userOptions && userOptions[key] !== void 0) {
192
+ result[key] = userOptions[key];
193
+ }
194
+ }
195
+ return result;
196
+ }
197
+ function logConfiguration(config, estimatedBlocks) {
198
+ if (config.suppressToolConfig) return;
199
+ console.log("\u{1F4CB} Configuration:");
200
+ console.log(` Repository size: ~${estimatedBlocks} code blocks`);
201
+ console.log(` Similarity threshold: ${config.minSimilarity}`);
202
+ console.log(` Minimum lines: ${config.minLines}`);
203
+ console.log(` Approximate mode: ${config.approx ? "enabled" : "disabled"}`);
204
+ console.log(` Max candidates per block: ${config.maxCandidatesPerBlock}`);
205
+ console.log(` Min shared tokens: ${config.minSharedTokens}`);
206
+ console.log(` Severity filter: ${config.severity}`);
207
+ console.log(` Include tests: ${config.includeTests}`);
208
+ if (config.excludePatterns && config.excludePatterns.length > 0) {
209
+ console.log(` Exclude patterns: ${config.excludePatterns.length} active`);
210
+ }
211
+ if (config.confidenceThreshold && config.confidenceThreshold > 0) {
212
+ console.log(` Confidence threshold: ${config.confidenceThreshold}`);
213
+ }
214
+ if (config.ignoreWhitelist && config.ignoreWhitelist.length > 0) {
215
+ console.log(
216
+ ` Ignore whitelist: ${config.ignoreWhitelist.length} entries`
217
+ );
218
+ }
219
+ console.log("");
220
+ }
221
+ async function analyzePatterns(options) {
222
+ const smartDefaults = await getSmartDefaults(options.rootDir || ".", options);
223
+ const finalOptions = { ...smartDefaults, ...options };
224
+ const {
225
+ minSimilarity = 0.4,
226
+ minLines = 5,
227
+ batchSize = 100,
228
+ approx = true,
229
+ minSharedTokens = 8,
230
+ maxCandidatesPerBlock = 100,
231
+ streamResults = false,
232
+ severity = "all",
233
+ groupByFilePair = true,
234
+ createClusters = true,
235
+ minClusterTokenCost = 1e3,
236
+ minClusterFiles = 3,
237
+ excludePatterns = [],
238
+ confidenceThreshold = 0,
239
+ ignoreWhitelist = [],
240
+ ...scanOptions
241
+ } = finalOptions;
242
+ const files = await scanFiles(scanOptions);
243
+ const estimatedBlocks = files.length * 3;
244
+ logConfiguration(finalOptions, estimatedBlocks);
245
+ const results = [];
246
+ const READ_BATCH_SIZE = 50;
247
+ const fileContents = [];
248
+ for (let i = 0; i < files.length; i += READ_BATCH_SIZE) {
249
+ const batch = files.slice(i, i + READ_BATCH_SIZE);
250
+ const batchContents = await Promise.all(
251
+ batch.map(async (file) => ({
252
+ file,
253
+ content: await readFileContent(file)
254
+ }))
255
+ );
256
+ fileContents.push(...batchContents);
257
+ }
258
+ const duplicates = await detectDuplicatePatterns(fileContents, {
259
+ minSimilarity,
260
+ minLines,
261
+ batchSize,
262
+ approx,
263
+ minSharedTokens,
264
+ maxCandidatesPerBlock,
265
+ streamResults,
266
+ excludePatterns,
267
+ confidenceThreshold,
268
+ ignoreWhitelist,
269
+ onProgress: options.onProgress
270
+ });
271
+ for (const file of files) {
272
+ const fileDuplicates = duplicates.filter(
273
+ (dup) => dup.file1 === file || dup.file2 === file
274
+ );
275
+ const issues = fileDuplicates.map((dup) => {
276
+ const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
277
+ const severity2 = dup.similarity > 0.95 ? Severity2.Critical : dup.similarity > 0.9 ? Severity2.Major : Severity2.Minor;
278
+ return {
279
+ type: IssueType.DuplicatePattern,
280
+ severity: severity2,
281
+ message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
282
+ location: {
283
+ file,
284
+ line: dup.file1 === file ? dup.line1 : dup.line2
285
+ },
286
+ suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
287
+ };
288
+ });
289
+ let filteredIssues = issues;
290
+ if (severity !== "all") {
291
+ const severityMap = {
292
+ critical: [Severity2.Critical],
293
+ high: [Severity2.Critical, Severity2.Major],
294
+ medium: [Severity2.Critical, Severity2.Major, Severity2.Minor]
295
+ };
296
+ const allowedSeverities = severityMap[severity] || [Severity2.Critical, Severity2.Major, Severity2.Minor];
297
+ filteredIssues = issues.filter(
298
+ (issue) => allowedSeverities.includes(issue.severity)
299
+ );
300
+ }
301
+ const totalTokenCost = fileDuplicates.reduce(
302
+ (sum, dup) => sum + dup.tokenCost,
303
+ 0
304
+ );
305
+ results.push({
306
+ fileName: file,
307
+ issues: filteredIssues,
308
+ metrics: {
309
+ tokenCost: totalTokenCost,
310
+ consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
311
+ }
312
+ });
313
+ }
314
+ let groups;
315
+ let clusters;
316
+ if (groupByFilePair) {
317
+ groups = groupDuplicatesByFilePair(duplicates);
318
+ }
319
+ if (createClusters) {
320
+ const allClusters = createRefactorClusters(duplicates);
321
+ clusters = filterClustersByImpact(
322
+ allClusters,
323
+ minClusterTokenCost,
324
+ minClusterFiles
325
+ );
326
+ }
327
+ return { results, duplicates, files, groups, clusters, config: finalOptions };
328
+ }
329
+ function generateSummary(results) {
330
+ const allIssues = results.flatMap((r) => r.issues);
331
+ const totalTokenCost = results.reduce(
332
+ (sum, r) => sum + (r.metrics.tokenCost || 0),
333
+ 0
334
+ );
335
+ const patternsByType = {
336
+ "api-handler": 0,
337
+ validator: 0,
338
+ utility: 0,
339
+ "class-method": 0,
340
+ component: 0,
341
+ function: 0,
342
+ unknown: 0
343
+ };
344
+ allIssues.forEach((issue) => {
345
+ const match = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
346
+ if (match) {
347
+ const type = match[1];
348
+ patternsByType[type] = (patternsByType[type] || 0) + 1;
349
+ }
350
+ });
351
+ const topDuplicates = allIssues.slice(0, 10).map((issue) => {
352
+ const similarityMatch = issue.message.match(/(\d+)% similar/);
353
+ const tokenMatch = issue.message.match(/\((\d+) tokens/);
354
+ const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
355
+ const fileMatch = issue.message.match(/similar to (.+?) \(/);
356
+ return {
357
+ files: [
358
+ {
359
+ path: issue.location.file,
360
+ startLine: issue.location.line,
361
+ endLine: 0
362
+ },
363
+ {
364
+ path: fileMatch?.[1] || "unknown",
365
+ startLine: 0,
366
+ endLine: 0
367
+ }
368
+ ],
369
+ similarity: similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0,
370
+ confidence: similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0,
371
+ // Fallback for summary
372
+ patternType: typeMatch?.[1] || "unknown",
373
+ tokenCost: tokenMatch ? parseInt(tokenMatch[1]) : 0
374
+ };
375
+ });
376
+ return {
377
+ totalPatterns: allIssues.length,
378
+ totalTokenCost,
379
+ patternsByType,
380
+ topDuplicates
381
+ };
382
+ }
383
+
384
+ export {
385
+ groupDuplicatesByFilePair,
386
+ createRefactorClusters,
387
+ filterClustersByImpact,
388
+ getSmartDefaults,
389
+ analyzePatterns,
390
+ generateSummary
391
+ };
package/dist/cli.js CHANGED
@@ -192,144 +192,27 @@ function filterBySeverity(duplicates, minSeverity) {
192
192
  });
193
193
  }
194
194
 
195
- // src/detector.ts
195
+ // src/core/normalizer.ts
196
196
  function normalizeCode(code, isPython = false) {
197
+ if (!code) return "";
197
198
  let normalized = code;
198
199
  if (isPython) {
199
200
  normalized = normalized.replace(/#.*/g, "");
200
201
  } else {
201
- normalized = normalized.replace(/\/\/.*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
202
+ normalized = normalized.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
202
203
  }
203
- return normalized.replace(/['"`]/g, '"').replace(/\s+/g, " ").trim().toLowerCase();
204
+ return normalized.replace(/"[^"]*"/g, '"STR"').replace(/'[^']*'/g, "'STR'").replace(/`[^`]*`/g, "`STR`").replace(/\b\d+\b/g, "NUM").replace(/\s+/g, " ").trim().toLowerCase();
204
205
  }
206
+
207
+ // src/detector.ts
205
208
  function extractBlocks(file, content) {
206
- const isPython = file.toLowerCase().endsWith(".py");
207
- if (isPython) {
208
- return extractBlocksPython(file, content);
209
- }
210
- const blocks = [];
211
- const lines = content.split("\n");
212
- const blockRegex = /^\s*(?:export\s+)?(?:async\s+)?(?:public\s+|private\s+|protected\s+|internal\s+|static\s+|readonly\s+|virtual\s+|abstract\s+|override\s+)*(function|class|interface|type|enum|record|struct|void|func|[a-zA-Z0-9_<>[]]+)\s+([a-zA-Z0-9_]+)(?:\s*\(|(?:\s+extends|\s+implements|\s+where)?\s*\{)|^\s*(?:export\s+)?const\s+([a-zA-Z0-9_]+)\s*=\s*[a-zA-Z0-9_.]+\.object\(|^\s*(app\.(?:get|post|put|delete|patch|use))\(/gm;
213
- let match;
214
- while ((match = blockRegex.exec(content)) !== null) {
215
- const startLine = content.substring(0, match.index).split("\n").length;
216
- let type;
217
- let name;
218
- if (match[1]) {
219
- type = match[1];
220
- name = match[2];
221
- } else if (match[3]) {
222
- type = "const";
223
- name = match[3];
224
- } else {
225
- type = "handler";
226
- name = match[4];
227
- }
228
- let endLine = -1;
229
- let openBraces = 0;
230
- let foundStart = false;
231
- for (let i = match.index; i < content.length; i++) {
232
- if (content[i] === "{") {
233
- openBraces++;
234
- foundStart = true;
235
- } else if (content[i] === "}") {
236
- openBraces--;
237
- }
238
- if (foundStart && openBraces === 0) {
239
- endLine = content.substring(0, i + 1).split("\n").length;
240
- break;
241
- }
242
- }
243
- if (endLine === -1) {
244
- const remaining = content.slice(match.index);
245
- const nextLineMatch = remaining.indexOf("\n");
246
- if (nextLineMatch !== -1) {
247
- endLine = startLine;
248
- } else {
249
- endLine = lines.length;
250
- }
251
- }
252
- endLine = Math.max(startLine, endLine);
253
- const blockCode = lines.slice(startLine - 1, endLine).join("\n");
254
- const tokens = (0, import_core2.estimateTokens)(blockCode);
255
- blocks.push({
256
- file,
257
- startLine,
258
- endLine,
259
- code: blockCode,
260
- tokens,
261
- patternType: inferPatternType(type, name)
262
- });
263
- }
264
- return blocks;
265
- }
266
- function extractBlocksPython(file, content) {
267
- const blocks = [];
268
- const lines = content.split("\n");
269
- const blockRegex = /^\s*(?:async\s+)?(def|class)\s+([a-zA-Z0-9_]+)/gm;
270
- let match;
271
- while ((match = blockRegex.exec(content)) !== null) {
272
- const startLinePos = content.substring(0, match.index).split("\n").length;
273
- const startLineIdx = startLinePos - 1;
274
- const initialIndent = lines[startLineIdx].search(/\S/);
275
- let endLineIdx = startLineIdx;
276
- for (let i = startLineIdx + 1; i < lines.length; i++) {
277
- const line = lines[i];
278
- if (line.trim().length === 0) {
279
- endLineIdx = i;
280
- continue;
281
- }
282
- const currentIndent = line.search(/\S/);
283
- if (currentIndent <= initialIndent) {
284
- break;
285
- }
286
- endLineIdx = i;
287
- }
288
- while (endLineIdx > startLineIdx && lines[endLineIdx].trim().length === 0) {
289
- endLineIdx--;
290
- }
291
- const blockCode = lines.slice(startLineIdx, endLineIdx + 1).join("\n");
292
- const tokens = (0, import_core2.estimateTokens)(blockCode);
293
- blocks.push({
294
- file,
295
- startLine: startLinePos,
296
- endLine: endLineIdx + 1,
297
- code: blockCode,
298
- tokens,
299
- patternType: inferPatternType(match[1], match[2])
300
- });
301
- }
302
- return blocks;
303
- }
304
- function inferPatternType(keyword, name) {
305
- const n = name.toLowerCase();
306
- if (keyword === "handler" || n.includes("handler") || n.includes("controller") || n.startsWith("app.")) {
307
- return "api-handler";
308
- }
309
- if (n.includes("validate") || n.includes("schema")) return "validator";
310
- if (n.includes("util") || n.includes("helper")) return "utility";
311
- if (keyword === "class") return "class-method";
312
- if (n.match(/^[A-Z]/)) return "component";
313
- if (keyword === "function") return "function";
314
- return "unknown";
209
+ return (0, import_core2.extractCodeBlocks)(file, content);
315
210
  }
316
211
  function calculateSimilarity(a, b) {
317
- if (a === b) return 1;
318
- const tokensA = a.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
319
- const tokensB = b.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
320
- if (tokensA.length === 0 || tokensB.length === 0) return 0;
321
- const setA = new Set(tokensA);
322
- const setB = new Set(tokensB);
323
- const intersection = new Set([...setA].filter((x) => setB.has(x)));
324
- const union = /* @__PURE__ */ new Set([...setA, ...setB]);
325
- return intersection.size / union.size;
212
+ return (0, import_core2.calculateStringSimilarity)(a, b);
326
213
  }
327
214
  function calculateConfidence(similarity, tokens, lines) {
328
- let confidence = similarity;
329
- if (lines > 20) confidence += 0.05;
330
- if (tokens > 200) confidence += 0.05;
331
- if (lines < 5) confidence -= 0.1;
332
- return Math.max(0, Math.min(1, confidence));
215
+ return (0, import_core2.calculateHeuristicConfidence)(similarity, tokens, lines);
333
216
  }
334
217
  async function detectDuplicatePatterns(fileContents, options) {
335
218
  const {
@@ -987,10 +870,14 @@ function generateHTMLReport(results, summary) {
987
870
  const { metadata } = data;
988
871
  const s = data.summary;
989
872
  const head = (0, import_core8.generateReportHead)("AIReady - Pattern Detection Report");
990
- const scoreCard = `<div class="stat-card" style="margin-bottom: 2rem;">
991
- <div class="stat-label">AI Ready Score (Deduplication)</div>
992
- <div class="stat-value">${Math.max(0, 100 - Math.round((s.duplicates?.length || 0) / (s.totalFiles || 1) * 20))}%</div>
993
- </div>`;
873
+ const score = Math.max(
874
+ 0,
875
+ 100 - Math.round((s.duplicates?.length || 0) / (s.totalFiles || 1) * 20)
876
+ );
877
+ const scoreCard = (0, import_core8.generateScoreCard)(
878
+ `${score}%`,
879
+ "AI Ready Score (Deduplication)"
880
+ );
994
881
  const stats = (0, import_core8.generateStatCards)([
995
882
  { value: s.totalFiles, label: "Files Analyzed" },
996
883
  { value: s.duplicates?.length || 0, label: "Duplicate Clusters" },
@@ -1189,12 +1076,7 @@ async function patternActionHandler(directory, options) {
1189
1076
  \u2713 HTML report saved to ${outputPath}`));
1190
1077
  return;
1191
1078
  }
1192
- const terminalWidth = process.stdout.columns || 80;
1193
- const dividerWidth = Math.min(60, terminalWidth - 2);
1194
- const divider = "\u2501".repeat(dividerWidth);
1195
- console.log(import_chalk.default.cyan(divider));
1196
- console.log(import_chalk.default.bold.white(" PATTERN ANALYSIS SUMMARY"));
1197
- console.log(import_chalk.default.cyan(divider) + "\n");
1079
+ (0, import_core9.printTerminalHeader)("PATTERN ANALYSIS SUMMARY");
1198
1080
  console.log(import_chalk.default.white(`\u{1F4C1} Files analyzed: ${import_chalk.default.bold(results.length)}`));
1199
1081
  console.log(
1200
1082
  import_chalk.default.yellow(
@@ -1209,9 +1091,9 @@ async function patternActionHandler(directory, options) {
1209
1091
  console.log(import_chalk.default.gray(`\u23F1 Analysis time: ${import_chalk.default.bold(elapsedTime + "s")}`));
1210
1092
  const sortedTypes = Object.entries(summary.patternsByType).filter(([, count]) => count > 0).sort(([, a], [, b]) => b - a);
1211
1093
  if (sortedTypes.length > 0) {
1212
- console.log(import_chalk.default.cyan("\n" + divider));
1094
+ console.log("\n" + (0, import_core9.getTerminalDivider)());
1213
1095
  console.log(import_chalk.default.bold.white(" PATTERNS BY TYPE"));
1214
- console.log(import_chalk.default.cyan(divider) + "\n");
1096
+ console.log((0, import_core9.getTerminalDivider)() + "\n");
1215
1097
  sortedTypes.forEach(([type, count]) => {
1216
1098
  const icon = getPatternIcon(type);
1217
1099
  console.log(
@@ -1220,11 +1102,11 @@ async function patternActionHandler(directory, options) {
1220
1102
  });
1221
1103
  }
1222
1104
  if (!finalOptions.showRawDuplicates && groups && groups.length > 0) {
1223
- console.log(import_chalk.default.cyan("\n" + divider));
1105
+ console.log("\n" + (0, import_core9.getTerminalDivider)());
1224
1106
  console.log(
1225
1107
  import_chalk.default.bold.white(` \u{1F4E6} DUPLICATE GROUPS (${groups.length} file pairs)`)
1226
1108
  );
1227
- console.log(import_chalk.default.cyan(divider) + "\n");
1109
+ console.log((0, import_core9.getTerminalDivider)() + "\n");
1228
1110
  const topGroups = groups.sort((a, b) => {
1229
1111
  const bVal = (0, import_core9.getSeverityValue)(b.severity);
1230
1112
  const aVal = (0, import_core9.getSeverityValue)(a.severity);
@@ -1265,11 +1147,11 @@ async function patternActionHandler(directory, options) {
1265
1147
  }
1266
1148
  }
1267
1149
  if (!finalOptions.showRawDuplicates && clusters && clusters.length > 0) {
1268
- console.log(import_chalk.default.cyan("\n" + divider));
1150
+ console.log("\n" + (0, import_core9.getTerminalDivider)());
1269
1151
  console.log(
1270
1152
  import_chalk.default.bold.white(` \u{1F3AF} REFACTOR CLUSTERS (${clusters.length} patterns)`)
1271
1153
  );
1272
- console.log(import_chalk.default.cyan(divider) + "\n");
1154
+ console.log((0, import_core9.getTerminalDivider)() + "\n");
1273
1155
  clusters.sort((a, b) => b.totalTokenCost - a.totalTokenCost).forEach((cluster, idx) => {
1274
1156
  const severityBadge = (0, import_core9.getSeverityBadge)(cluster.severity);
1275
1157
  console.log(`${idx + 1}. ${severityBadge} ${import_chalk.default.bold(cluster.name)}`);
@@ -1297,9 +1179,9 @@ async function patternActionHandler(directory, options) {
1297
1179
  });
1298
1180
  }
1299
1181
  if (totalIssues > 0 && (finalOptions.showRawDuplicates || !groups || groups.length === 0)) {
1300
- console.log(import_chalk.default.cyan("\n" + divider));
1182
+ console.log("\n" + (0, import_core9.getTerminalDivider)());
1301
1183
  console.log(import_chalk.default.bold.white(" TOP DUPLICATE PATTERNS"));
1302
- console.log(import_chalk.default.cyan(divider) + "\n");
1184
+ console.log((0, import_core9.getTerminalDivider)() + "\n");
1303
1185
  const topDuplicates = filteredDuplicates.sort((a, b) => {
1304
1186
  const bVal = (0, import_core9.getSeverityValue)(b.severity);
1305
1187
  const aVal = (0, import_core9.getSeverityValue)(a.severity);
@@ -1346,9 +1228,9 @@ async function patternActionHandler(directory, options) {
1346
1228
  (issue) => (0, import_core9.getSeverityValue)(issue.severity) === 4
1347
1229
  );
1348
1230
  if (criticalIssues.length > 0) {
1349
- console.log(import_chalk.default.cyan(divider));
1231
+ console.log((0, import_core9.getTerminalDivider)());
1350
1232
  console.log(import_chalk.default.bold.white(" CRITICAL ISSUES (>95% similar)"));
1351
- console.log(import_chalk.default.cyan(divider) + "\n");
1233
+ console.log((0, import_core9.getTerminalDivider)() + "\n");
1352
1234
  criticalIssues.slice(0, 5).forEach((issue) => {
1353
1235
  console.log(
1354
1236
  import_chalk.default.red("\u25CF ") + import_chalk.default.white(`${issue.file}:${issue.location.line}`)
@@ -1385,7 +1267,7 @@ async function patternActionHandler(directory, options) {
1385
1267
  );
1386
1268
  console.log("");
1387
1269
  }
1388
- console.log(import_chalk.default.cyan(divider));
1270
+ console.log((0, import_core9.getTerminalDivider)());
1389
1271
  if (totalIssues > 0) {
1390
1272
  console.log(
1391
1273
  import_chalk.default.white(