@aiready/pattern-detect 0.16.19 → 0.16.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dist/analyzer-entry/index.d.mts +3 -0
  2. package/dist/analyzer-entry/index.d.ts +3 -0
  3. package/dist/analyzer-entry/index.js +693 -0
  4. package/dist/analyzer-entry/index.mjs +12 -0
  5. package/dist/analyzer-entry.d.mts +100 -3
  6. package/dist/analyzer-entry.d.ts +100 -3
  7. package/dist/analyzer-entry.js +9 -126
  8. package/dist/analyzer-entry.mjs +2 -2
  9. package/dist/chunk-65UQ5J2J.mjs +64 -0
  10. package/dist/chunk-6JTVOBJX.mjs +64 -0
  11. package/dist/chunk-BKRPSTT2.mjs +64 -0
  12. package/dist/chunk-CMWW24HW.mjs +259 -0
  13. package/dist/chunk-DNZS4ESD.mjs +391 -0
  14. package/dist/chunk-GLKAGFKX.mjs +391 -0
  15. package/dist/chunk-GREN7X5H.mjs +143 -0
  16. package/dist/chunk-JBUZ6YHE.mjs +391 -0
  17. package/dist/chunk-KWMNN3TG.mjs +391 -0
  18. package/dist/chunk-LYKRYBSM.mjs +64 -0
  19. package/dist/chunk-MHU3CL4R.mjs +64 -0
  20. package/dist/chunk-RS73WLNI.mjs +251 -0
  21. package/dist/chunk-SVCSIZ2A.mjs +259 -0
  22. package/dist/chunk-VGMM3L3O.mjs +143 -0
  23. package/dist/chunk-XNPID6FU.mjs +391 -0
  24. package/dist/cli.js +29 -147
  25. package/dist/cli.mjs +27 -25
  26. package/dist/context-rules-entry/index.d.mts +2 -0
  27. package/dist/context-rules-entry/index.d.ts +2 -0
  28. package/dist/context-rules-entry/index.js +207 -0
  29. package/dist/context-rules-entry/index.mjs +12 -0
  30. package/dist/context-rules-entry.d.mts +55 -2
  31. package/dist/context-rules-entry.d.ts +55 -2
  32. package/dist/detector-entry/index.d.mts +14 -0
  33. package/dist/detector-entry/index.d.ts +14 -0
  34. package/dist/detector-entry/index.js +301 -0
  35. package/dist/detector-entry/index.mjs +7 -0
  36. package/dist/detector-entry.d.mts +2 -2
  37. package/dist/detector-entry.d.ts +2 -2
  38. package/dist/detector-entry.js +9 -126
  39. package/dist/detector-entry.mjs +1 -1
  40. package/dist/index-BVz-HnZd.d.mts +119 -0
  41. package/dist/index-BwuoiCNm.d.ts +119 -0
  42. package/dist/index-y2uJSngh.d.mts +60 -0
  43. package/dist/index-y2uJSngh.d.ts +60 -0
  44. package/dist/index.d.mts +4 -4
  45. package/dist/index.d.ts +4 -4
  46. package/dist/index.js +9 -126
  47. package/dist/index.mjs +3 -3
  48. package/dist/scoring-entry/index.d.mts +23 -0
  49. package/dist/scoring-entry/index.d.ts +23 -0
  50. package/dist/scoring-entry/index.js +133 -0
  51. package/dist/scoring-entry/index.mjs +6 -0
  52. package/dist/scoring-entry.d.mts +1 -1
  53. package/dist/scoring-entry.d.ts +1 -1
  54. package/dist/types-C4lmb2Yh.d.mts +36 -0
  55. package/dist/types-C4lmb2Yh.d.ts +36 -0
  56. package/package.json +16 -16
@@ -0,0 +1,301 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
// src/detector-entry/index.ts
// Public CommonJS surface of this entry point: only detectDuplicatePatterns
// is exported, wired up as a lazy getter via the __export helper above.
var detector_entry_exports = {};
__export(detector_entry_exports, {
  detectDuplicatePatterns: () => detectDuplicatePatterns
});
module.exports = __toCommonJS(detector_entry_exports);
26
+
27
+ // src/detector.ts
28
+ var import_core2 = require("@aiready/core");
29
+
30
+ // src/context-rules.ts
31
+ var import_core = require("@aiready/core");
32
/**
 * Contextual whitelisting rules for duplicate findings.
 * Each rule's detect(file, code) returns true when the duplication is
 * likely intentional for that context; the first matching rule supplies
 * the downgraded severity plus human-readable reason and suggestion.
 */
var CONTEXT_RULES = [
  // Test fixtures - intentional duplication for test isolation
  {
    name: "test-fixtures",
    detect: (file, code) => {
      const testPathHints = [".test.", ".spec.", "__tests__", "/test/", "/tests/"];
      const fixtureHints = ["beforeAll", "afterAll", "beforeEach", "afterEach", "setUp", "tearDown"];
      return testPathHints.some((h) => file.includes(h)) && fixtureHints.some((h) => code.includes(h));
    },
    severity: import_core.Severity.Info,
    reason: "Test fixture duplication is intentional for test isolation",
    suggestion: "Consider if shared test setup would improve maintainability without coupling tests"
  },
  // Email/document templates - often intentionally similar for consistency
  {
    name: "templates",
    detect: (file, code) => {
      const templatePaths = ["/templates/", "-template", "/email-templates/", "/emails/"];
      const emitsContent = code.includes("return") || code.includes("export");
      const templateFields = code.includes("html") || code.includes("subject") || code.includes("body");
      return templatePaths.some((h) => file.includes(h)) && emitsContent && templateFields;
    },
    severity: import_core.Severity.Minor,
    reason: "Template duplication may be intentional for maintainability and branding consistency",
    suggestion: "Extract shared structure only if templates become hard to maintain"
  },
  // E2E/integration test page objects - test independence
  {
    name: "e2e-page-objects",
    detect: (file, code) => {
      const e2ePaths = [
        "e2e/", "/e2e/", ".e2e.",
        "/playwright/", "playwright/",
        "/cypress/", "cypress/",
        "/integration/", "integration/"
      ];
      const pageObjectHints = ["page.", "await page", "locator", "getBy", "selector", "click(", "fill("];
      return e2ePaths.some((h) => file.includes(h)) && pageObjectHints.some((h) => code.includes(h));
    },
    severity: import_core.Severity.Minor,
    reason: "E2E test duplication ensures test independence and reduces coupling",
    suggestion: "Consider page object pattern only if duplication causes maintenance issues"
  },
  // Configuration files - often necessarily similar by design
  {
    name: "config-files",
    detect: (file) => {
      if (file.endsWith(".config.ts") || file.endsWith(".config.js")) return true;
      const configNames = ["jest.config", "vite.config", "webpack.config", "rollup.config", "tsconfig"];
      return configNames.some((h) => file.includes(h));
    },
    severity: import_core.Severity.Minor,
    reason: "Configuration files often have similar structure by design",
    suggestion: "Consider shared config base only if configurations become hard to maintain"
  },
  // Type definitions - duplication for type safety and module independence
  {
    name: "type-definitions",
    detect: (file, code) => {
      const isTypeFile = file.endsWith(".d.ts") || file.includes("/types/");
      const typeKeywords = ["interface ", "type ", "enum "];
      return isTypeFile && typeKeywords.some((h) => code.includes(h));
    },
    severity: import_core.Severity.Info,
    reason: "Type duplication may be intentional for module independence and type safety",
    suggestion: "Extract to shared types package only if causing maintenance burden"
  },
  // Migration scripts - one-off scripts that are similar by nature
  {
    name: "migration-scripts",
    detect: (file) => {
      const migrationPaths = ["/migrations/", "/migrate/", ".migration."];
      return migrationPaths.some((h) => file.includes(h));
    },
    severity: import_core.Severity.Info,
    reason: "Migration scripts are typically one-off and intentionally similar",
    suggestion: "Duplication is acceptable for migration scripts"
  },
  // Mock data - test data intentionally duplicated
  {
    name: "mock-data",
    detect: (file, code) => {
      const mockPaths = ["/mocks/", "/__mocks__/", "/fixtures/", ".mock.", ".fixture."];
      const mockHints = ["mock", "Mock", "fixture", "stub", "export const"];
      return mockPaths.some((h) => file.includes(h)) && mockHints.some((h) => code.includes(h));
    },
    severity: import_core.Severity.Info,
    reason: "Mock data duplication is expected for comprehensive test coverage",
    suggestion: "Consider shared factories only for complex mock generation"
  },
  // Tool implementations - structural boilerplate
  {
    name: "tool-implementations",
    detect: (file, code) => {
      const isToolFile = file.includes("/tools/") || file.endsWith(".tool.ts") || code.includes("toolDefinitions");
      const hasToolStructure = code.includes("execute") && (code.includes("try") || code.includes("catch"));
      return isToolFile && hasToolStructure;
    },
    severity: import_core.Severity.Info,
    reason: "Tool implementations share structural boilerplate but have distinct business logic",
    suggestion: "Tool duplication is acceptable for boilerplate interface wrappers"
  }
];
126
/**
 * Map a duplicate finding to a severity plus guidance text.
 * Context rules take precedence (checked against both files' paths with
 * the first file's code); otherwise severity falls out of similarity
 * and block size tiers.
 */
function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
  const matched = CONTEXT_RULES.find(
    (rule) => rule.detect(file1, code) || rule.detect(file2, code)
  );
  if (matched) {
    return {
      severity: matched.severity,
      reason: matched.reason,
      suggestion: matched.suggestion,
      matchedRule: matched.name
    };
  }
  if (similarity >= 0.95 && linesOfCode >= 30) {
    return {
      severity: import_core.Severity.Critical,
      reason: "Large nearly-identical code blocks waste tokens and create maintenance burden",
      suggestion: "Extract to shared utility module immediately"
    };
  }
  if (similarity >= 0.95 && linesOfCode >= 15) {
    return {
      severity: import_core.Severity.Major,
      reason: "Nearly identical code should be consolidated",
      suggestion: "Move to shared utility file"
    };
  }
  if (similarity >= 0.85) {
    return {
      severity: import_core.Severity.Major,
      reason: "High similarity indicates significant duplication",
      suggestion: "Extract common logic to shared function"
    };
  }
  if (similarity >= 0.7) {
    return {
      severity: import_core.Severity.Minor,
      reason: "Moderate similarity detected",
      suggestion: "Consider extracting shared patterns if code evolves together"
    };
  }
  return {
    severity: import_core.Severity.Minor,
    reason: "Minor similarity detected",
    suggestion: "Monitor but refactoring may not be worthwhile"
  };
}
169
+
170
// src/core/normalizer.ts
/**
 * Canonicalize source text for similarity comparison: strip comments,
 * mask string literals as STR and integers as NUM, collapse whitespace,
 * and lowercase — so cosmetic differences don't affect matching.
 * NOTE(review): comment stripping is regex-based, so `//` inside a
 * string literal (e.g. a URL) is also treated as a comment start.
 */
function normalizeCode(code, isPython = false) {
  if (!code) return "";
  // Comment syntax differs per language family.
  const withoutComments = isPython
    ? code.replace(/#.*/g, "")
    : code.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
  return withoutComments
    .replace(/"[^"]*"/g, '"STR"')
    .replace(/'[^']*'/g, "'STR'")
    .replace(/`[^`]*`/g, "`STR`")
    .replace(/\b\d+\b/g, "NUM")
    .replace(/\s+/g, " ")
    .trim()
    .toLowerCase();
}
181
+
182
// src/detector.ts
// Thin wrappers over @aiready/core so the rest of this module is decoupled
// from the core package's exact export names.

// Delegate block extraction for a file's content to core's extractCodeBlocks.
function extractBlocks(file, content) {
  return (0, import_core2.extractCodeBlocks)(file, content);
}
// Similarity score between two (normalized) code strings via core.
function calculateSimilarity(a, b) {
  return (0, import_core2.calculateStringSimilarity)(a, b);
}
// Heuristic confidence that a match is a true duplicate via core.
function calculateConfidence(similarity, tokens, lines) {
  return (0, import_core2.calculateHeuristicConfidence)(similarity, tokens, lines);
}
192
/**
 * Detect near-duplicate code blocks across a set of files.
 *
 * Blocks shorter than minLines or matching an exclude pattern are dropped,
 * then every remaining pair is compared (O(n^2) comparisons). Same-file
 * pairs, whitelisted pairs, and matches below the confidence threshold are
 * skipped. Progress is reported via onProgress; matches stream to the
 * console when streamResults is set.
 *
 * @param fileContents Array of { file, content } entries to scan.
 * @param options Detection thresholds and callbacks (see destructuring).
 * @returns Promise of duplicate patterns sorted by similarity, highest first.
 */
async function detectDuplicatePatterns(fileContents, options) {
  const {
    minSimilarity,
    minLines,
    streamResults,
    onProgress,
    excludePatterns = [],
    confidenceThreshold = 0,
    ignoreWhitelist = []
  } = options;
  const allBlocks = [];
  const excludeRegexes = excludePatterns.map((p) => new RegExp(p, "i"));
  for (const { file, content } of fileContents) {
    const blocks = extractBlocks(file, content);
    for (const b of blocks) {
      if (b.endLine - b.startLine + 1 < minLines) continue;
      const isExcluded = excludeRegexes.some((regex) => regex.test(b.code));
      if (isExcluded) continue;
      allBlocks.push(b);
    }
  }
  // Normalize each block exactly once up front. The previous version
  // re-ran normalizeCode(b2.code) inside the inner pairwise loop, doing
  // O(n^2) normalizations for values that depend only on the block.
  const normalizedBlocks = allBlocks.map(
    (b) => normalizeCode(b.code, b.file.toLowerCase().endsWith(".py"))
  );
  const duplicates = [];
  const totalBlocks = allBlocks.length;
  let comparisons = 0;
  const totalComparisons = totalBlocks * (totalBlocks - 1) / 2;
  if (onProgress) {
    onProgress(
      0,
      totalComparisons,
      `Starting duplicate detection on ${totalBlocks} blocks...`
    );
  }
  for (let i = 0; i < allBlocks.length; i++) {
    // Yield to the event loop periodically so long scans stay responsive.
    if (i % 50 === 0 && i > 0) {
      await new Promise((resolve) => setImmediate(resolve));
      if (onProgress) {
        onProgress(
          comparisons,
          totalComparisons,
          `Analyzing blocks (${i}/${totalBlocks})...`
        );
      }
    }
    const b1 = allBlocks[i];
    const norm1 = normalizedBlocks[i];
    for (let j = i + 1; j < allBlocks.length; j++) {
      comparisons++;
      const b2 = allBlocks[j];
      if (b1.file === b2.file) continue;
      // A pair is whitelisted when both paths contain the pattern, or the
      // pattern names this exact file pair in either order.
      const isWhitelisted = ignoreWhitelist.some((pattern) => {
        return b1.file.includes(pattern) && b2.file.includes(pattern) || pattern === `${b1.file}::${b2.file}` || pattern === `${b2.file}::${b1.file}`;
      });
      if (isWhitelisted) continue;
      const sim = calculateSimilarity(norm1, normalizedBlocks[j]);
      if (sim >= minSimilarity) {
        const confidence = calculateConfidence(
          sim,
          b1.tokens,
          b1.endLine - b1.startLine + 1
        );
        if (confidence < confidenceThreshold) continue;
        const { severity, reason, suggestion, matchedRule } = calculateSeverity(
          b1.file,
          b2.file,
          b1.code,
          sim,
          b1.endLine - b1.startLine + 1
        );
        const dup = {
          file1: b1.file,
          line1: b1.startLine,
          endLine1: b1.endLine,
          file2: b2.file,
          line2: b2.startLine,
          endLine2: b2.endLine,
          code1: b1.code,
          code2: b2.code,
          similarity: sim,
          confidence,
          patternType: b1.patternType,
          tokenCost: b1.tokens + b2.tokens,
          severity,
          reason,
          suggestion,
          matchedRule
        };
        duplicates.push(dup);
        if (streamResults)
          console.log(
            `[DUPLICATE] ${dup.file1}:${dup.line1} <-> ${dup.file2}:${dup.line2} (${Math.round(sim * 100)}%, conf: ${Math.round(confidence * 100)}%)`
          );
      }
    }
  }
  if (onProgress) {
    onProgress(
      totalComparisons,
      totalComparisons,
      `Duplicate detection complete. Found ${duplicates.length} patterns.`
    );
  }
  return duplicates.sort((a, b) => b.similarity - a.similarity);
}
298
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code: `0 &&` never executes, but Node's static
// CJS named-export detection parses it so named ESM imports work)
0 && (module.exports = {
  detectDuplicatePatterns
});
@@ -0,0 +1,7 @@
1
+ import {
2
+ detectDuplicatePatterns
3
+ } from "../chunk-VGMM3L3O.mjs";
4
+ import "../chunk-I6ETJC7L.mjs";
5
+ export {
6
+ detectDuplicatePatterns
7
+ };
@@ -1,5 +1,5 @@
1
- import { a as DetectionOptions, D as DuplicatePattern } from './types-DU2mmhwb.mjs';
2
- export { P as PatternType } from './types-DU2mmhwb.mjs';
1
+ import { D as DetectionOptions, a as DuplicatePattern } from './types-C4lmb2Yh.mjs';
2
+ export { P as PatternType } from './types-C4lmb2Yh.mjs';
3
3
  import { FileContent } from '@aiready/core';
4
4
 
5
5
  /**
@@ -1,5 +1,5 @@
1
- import { a as DetectionOptions, D as DuplicatePattern } from './types-DU2mmhwb.js';
2
- export { P as PatternType } from './types-DU2mmhwb.js';
1
+ import { D as DetectionOptions, a as DuplicatePattern } from './types-C4lmb2Yh.js';
2
+ export { P as PatternType } from './types-C4lmb2Yh.js';
3
3
  import { FileContent } from '@aiready/core';
4
4
 
5
5
  /**
@@ -167,144 +167,27 @@ function calculateSeverity(file1, file2, code, similarity, linesOfCode) {
167
167
  }
168
168
  }
169
169
 
170
- // src/detector.ts
170
+ // src/core/normalizer.ts
171
171
  function normalizeCode(code, isPython = false) {
172
+ if (!code) return "";
172
173
  let normalized = code;
173
174
  if (isPython) {
174
175
  normalized = normalized.replace(/#.*/g, "");
175
176
  } else {
176
- normalized = normalized.replace(/\/\/.*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
177
+ normalized = normalized.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "");
177
178
  }
178
- return normalized.replace(/['"`]/g, '"').replace(/\s+/g, " ").trim().toLowerCase();
179
+ return normalized.replace(/"[^"]*"/g, '"STR"').replace(/'[^']*'/g, "'STR'").replace(/`[^`]*`/g, "`STR`").replace(/\b\d+\b/g, "NUM").replace(/\s+/g, " ").trim().toLowerCase();
179
180
  }
181
+
182
+ // src/detector.ts
180
183
  function extractBlocks(file, content) {
181
- const isPython = file.toLowerCase().endsWith(".py");
182
- if (isPython) {
183
- return extractBlocksPython(file, content);
184
- }
185
- const blocks = [];
186
- const lines = content.split("\n");
187
- const blockRegex = /^\s*(?:export\s+)?(?:async\s+)?(?:public\s+|private\s+|protected\s+|internal\s+|static\s+|readonly\s+|virtual\s+|abstract\s+|override\s+)*(function|class|interface|type|enum|record|struct|void|func|[a-zA-Z0-9_<>[]]+)\s+([a-zA-Z0-9_]+)(?:\s*\(|(?:\s+extends|\s+implements|\s+where)?\s*\{)|^\s*(?:export\s+)?const\s+([a-zA-Z0-9_]+)\s*=\s*[a-zA-Z0-9_.]+\.object\(|^\s*(app\.(?:get|post|put|delete|patch|use))\(/gm;
188
- let match;
189
- while ((match = blockRegex.exec(content)) !== null) {
190
- const startLine = content.substring(0, match.index).split("\n").length;
191
- let type;
192
- let name;
193
- if (match[1]) {
194
- type = match[1];
195
- name = match[2];
196
- } else if (match[3]) {
197
- type = "const";
198
- name = match[3];
199
- } else {
200
- type = "handler";
201
- name = match[4];
202
- }
203
- let endLine = -1;
204
- let openBraces = 0;
205
- let foundStart = false;
206
- for (let i = match.index; i < content.length; i++) {
207
- if (content[i] === "{") {
208
- openBraces++;
209
- foundStart = true;
210
- } else if (content[i] === "}") {
211
- openBraces--;
212
- }
213
- if (foundStart && openBraces === 0) {
214
- endLine = content.substring(0, i + 1).split("\n").length;
215
- break;
216
- }
217
- }
218
- if (endLine === -1) {
219
- const remaining = content.slice(match.index);
220
- const nextLineMatch = remaining.indexOf("\n");
221
- if (nextLineMatch !== -1) {
222
- endLine = startLine;
223
- } else {
224
- endLine = lines.length;
225
- }
226
- }
227
- endLine = Math.max(startLine, endLine);
228
- const blockCode = lines.slice(startLine - 1, endLine).join("\n");
229
- const tokens = (0, import_core2.estimateTokens)(blockCode);
230
- blocks.push({
231
- file,
232
- startLine,
233
- endLine,
234
- code: blockCode,
235
- tokens,
236
- patternType: inferPatternType(type, name)
237
- });
238
- }
239
- return blocks;
240
- }
241
- function extractBlocksPython(file, content) {
242
- const blocks = [];
243
- const lines = content.split("\n");
244
- const blockRegex = /^\s*(?:async\s+)?(def|class)\s+([a-zA-Z0-9_]+)/gm;
245
- let match;
246
- while ((match = blockRegex.exec(content)) !== null) {
247
- const startLinePos = content.substring(0, match.index).split("\n").length;
248
- const startLineIdx = startLinePos - 1;
249
- const initialIndent = lines[startLineIdx].search(/\S/);
250
- let endLineIdx = startLineIdx;
251
- for (let i = startLineIdx + 1; i < lines.length; i++) {
252
- const line = lines[i];
253
- if (line.trim().length === 0) {
254
- endLineIdx = i;
255
- continue;
256
- }
257
- const currentIndent = line.search(/\S/);
258
- if (currentIndent <= initialIndent) {
259
- break;
260
- }
261
- endLineIdx = i;
262
- }
263
- while (endLineIdx > startLineIdx && lines[endLineIdx].trim().length === 0) {
264
- endLineIdx--;
265
- }
266
- const blockCode = lines.slice(startLineIdx, endLineIdx + 1).join("\n");
267
- const tokens = (0, import_core2.estimateTokens)(blockCode);
268
- blocks.push({
269
- file,
270
- startLine: startLinePos,
271
- endLine: endLineIdx + 1,
272
- code: blockCode,
273
- tokens,
274
- patternType: inferPatternType(match[1], match[2])
275
- });
276
- }
277
- return blocks;
278
- }
279
- function inferPatternType(keyword, name) {
280
- const n = name.toLowerCase();
281
- if (keyword === "handler" || n.includes("handler") || n.includes("controller") || n.startsWith("app.")) {
282
- return "api-handler";
283
- }
284
- if (n.includes("validate") || n.includes("schema")) return "validator";
285
- if (n.includes("util") || n.includes("helper")) return "utility";
286
- if (keyword === "class") return "class-method";
287
- if (n.match(/^[A-Z]/)) return "component";
288
- if (keyword === "function") return "function";
289
- return "unknown";
184
+ return (0, import_core2.extractCodeBlocks)(file, content);
290
185
  }
291
186
  function calculateSimilarity(a, b) {
292
- if (a === b) return 1;
293
- const tokensA = a.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
294
- const tokensB = b.split(/[^a-zA-Z0-9]+/).filter((t) => t.length > 0);
295
- if (tokensA.length === 0 || tokensB.length === 0) return 0;
296
- const setA = new Set(tokensA);
297
- const setB = new Set(tokensB);
298
- const intersection = new Set([...setA].filter((x) => setB.has(x)));
299
- const union = /* @__PURE__ */ new Set([...setA, ...setB]);
300
- return intersection.size / union.size;
187
+ return (0, import_core2.calculateStringSimilarity)(a, b);
301
188
  }
302
189
  function calculateConfidence(similarity, tokens, lines) {
303
- let confidence = similarity;
304
- if (lines > 20) confidence += 0.05;
305
- if (tokens > 200) confidence += 0.05;
306
- if (lines < 5) confidence -= 0.1;
307
- return Math.max(0, Math.min(1, confidence));
190
+ return (0, import_core2.calculateHeuristicConfidence)(similarity, tokens, lines);
308
191
  }
309
192
  async function detectDuplicatePatterns(fileContents, options) {
310
193
  const {
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  detectDuplicatePatterns
3
- } from "./chunk-THF4RW63.mjs";
3
+ } from "./chunk-VGMM3L3O.mjs";
4
4
  import "./chunk-I6ETJC7L.mjs";
5
5
  export {
6
6
  detectDuplicatePatterns
@@ -0,0 +1,119 @@
1
+ import { Severity, ScanOptions, AnalysisResult } from '@aiready/core';
2
+ import { P as PatternType, D as DuplicatePattern } from './types-DU2mmhwb.mjs';
3
+
4
+ interface DuplicateGroup {
5
+ filePair: string;
6
+ severity: Severity;
7
+ occurrences: number;
8
+ totalTokenCost: number;
9
+ averageSimilarity: number;
10
+ patternTypes: Set<PatternType>;
11
+ lineRanges: Array<{
12
+ file1: {
13
+ start: number;
14
+ end: number;
15
+ };
16
+ file2: {
17
+ start: number;
18
+ end: number;
19
+ };
20
+ }>;
21
+ }
22
+ interface RefactorCluster {
23
+ id: string;
24
+ name: string;
25
+ files: string[];
26
+ severity: Severity;
27
+ duplicateCount: number;
28
+ totalTokenCost: number;
29
+ averageSimilarity: number;
30
+ reason?: string;
31
+ suggestion?: string;
32
+ }
33
+ /**
34
+ * Group raw duplicates by file pairs to reduce noise
35
+ */
36
+ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): DuplicateGroup[];
37
+ /**
38
+ * Create clusters of highly related files (refactor targets)
39
+ * Uses a simple connected components algorithm
40
+ * @param duplicates - Array of duplicate patterns to cluster
41
+ * @returns Array of refactor clusters
42
+ */
43
+ declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
44
+ /**
45
+ * Filter clusters by impact threshold
46
+ * @param clusters - Array of refactor clusters to filter
47
+ * @param minTokenCost - Minimum token cost threshold (default: 1000)
48
+ * @param minFiles - Minimum number of files in cluster (default: 3)
49
+ * @returns Filtered array of refactor clusters
50
+ */
51
+ declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
52
+
53
+ interface PatternDetectOptions extends ScanOptions {
54
+ minSimilarity?: number;
55
+ minLines?: number;
56
+ batchSize?: number;
57
+ approx?: boolean;
58
+ minSharedTokens?: number;
59
+ maxCandidatesPerBlock?: number;
60
+ streamResults?: boolean;
61
+ severity?: string;
62
+ includeTests?: boolean;
63
+ useSmartDefaults?: boolean;
64
+ groupByFilePair?: boolean;
65
+ createClusters?: boolean;
66
+ minClusterTokenCost?: number;
67
+ minClusterFiles?: number;
68
+ excludePatterns?: string[];
69
+ confidenceThreshold?: number;
70
+ ignoreWhitelist?: string[];
71
+ onProgress?: (processed: number, total: number, message: string) => void;
72
+ }
73
+ interface PatternSummary {
74
+ totalPatterns: number;
75
+ totalTokenCost: number;
76
+ patternsByType: Record<PatternType, number>;
77
+ topDuplicates: Array<{
78
+ files: Array<{
79
+ path: string;
80
+ startLine: number;
81
+ endLine: number;
82
+ }>;
83
+ similarity: number;
84
+ patternType: PatternType;
85
+ tokenCost: number;
86
+ }>;
87
+ }
88
+ /**
89
+ * Determine smart defaults based on repository size estimation.
90
+ *
91
+ * @param directory - The directory to analyze for size.
92
+ * @param userOptions - User-provided option overrides.
93
+ * @returns Promise resolving to optimal detection options.
94
+ */
95
+ declare function getSmartDefaults(directory: string, userOptions: Partial<PatternDetectOptions>): Promise<PatternDetectOptions>;
96
+ /**
97
+ * Main entry point for pattern detection analysis.
98
+ *
99
+ * @param options - Configuration including rootDir and detection parameters.
100
+ * @returns Promise resolving to the comprehensive pattern detect report.
101
+ * @lastUpdated 2026-03-18
102
+ */
103
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<{
104
+ results: AnalysisResult[];
105
+ duplicates: DuplicatePattern[];
106
+ files: string[];
107
+ groups?: DuplicateGroup[];
108
+ clusters?: RefactorCluster[];
109
+ config: PatternDetectOptions;
110
+ }>;
111
+ /**
112
+ * Generate a summary of pattern detection results.
113
+ *
114
+ * @param results - Array of file-level analysis results.
115
+ * @returns Consolidated pattern summary object.
116
+ */
117
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
118
+
119
+ export { type DuplicateGroup as D, type PatternDetectOptions as P, type RefactorCluster as R, type PatternSummary as a, analyzePatterns as b, createRefactorClusters as c, getSmartDefaults as d, groupDuplicatesByFilePair as e, filterClustersByImpact as f, generateSummary as g };