@aiready/pattern-detect 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,50 @@
+ import { ScanOptions, AnalysisResult } from '@aiready/core';
+
+ interface DuplicatePattern {
+ file1: string;
+ file2: string;
+ line1: number;
+ line2: number;
+ similarity: number;
+ snippet: string;
+ patternType: PatternType;
+ tokenCost: number;
+ linesOfCode: number;
+ }
+ type PatternType = 'function' | 'class-method' | 'api-handler' | 'validator' | 'utility' | 'component' | 'unknown';
+ interface FileContent {
+ file: string;
+ content: string;
+ }
+ interface DetectionOptions {
+ minSimilarity: number;
+ minLines: number;
+ }
+ /**
+ * Detect duplicate patterns across files with enhanced analysis
+ */
+ declare function detectDuplicatePatterns(files: FileContent[], options: DetectionOptions): DuplicatePattern[];
+
+ interface PatternDetectOptions extends ScanOptions {
+ minSimilarity?: number;
+ minLines?: number;
+ }
+ interface PatternSummary {
+ totalPatterns: number;
+ totalTokenCost: number;
+ patternsByType: Record<PatternType, number>;
+ topDuplicates: Array<{
+ file1: string;
+ file2: string;
+ similarity: number;
+ patternType: PatternType;
+ tokenCost: number;
+ }>;
+ }
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<AnalysisResult[]>;
+ /**
+ * Generate a summary of pattern analysis
+ */
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
+
+ export { type DuplicatePattern, type PatternDetectOptions, type PatternSummary, type PatternType, analyzePatterns, detectDuplicatePatterns, generateSummary };
@@ -0,0 +1,50 @@
+ import { ScanOptions, AnalysisResult } from '@aiready/core';
+
+ interface DuplicatePattern {
+ file1: string;
+ file2: string;
+ line1: number;
+ line2: number;
+ similarity: number;
+ snippet: string;
+ patternType: PatternType;
+ tokenCost: number;
+ linesOfCode: number;
+ }
+ type PatternType = 'function' | 'class-method' | 'api-handler' | 'validator' | 'utility' | 'component' | 'unknown';
+ interface FileContent {
+ file: string;
+ content: string;
+ }
+ interface DetectionOptions {
+ minSimilarity: number;
+ minLines: number;
+ }
+ /**
+ * Detect duplicate patterns across files with enhanced analysis
+ */
+ declare function detectDuplicatePatterns(files: FileContent[], options: DetectionOptions): DuplicatePattern[];
+
+ interface PatternDetectOptions extends ScanOptions {
+ minSimilarity?: number;
+ minLines?: number;
+ }
+ interface PatternSummary {
+ totalPatterns: number;
+ totalTokenCost: number;
+ patternsByType: Record<PatternType, number>;
+ topDuplicates: Array<{
+ file1: string;
+ file2: string;
+ similarity: number;
+ patternType: PatternType;
+ tokenCost: number;
+ }>;
+ }
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<AnalysisResult[]>;
+ /**
+ * Generate a summary of pattern analysis
+ */
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
+
+ export { type DuplicatePattern, type PatternDetectOptions, type PatternSummary, type PatternType, analyzePatterns, detectDuplicatePatterns, generateSummary };
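Based on the type declarations above, a minimal programmatic usage sketch might look like the following. PatternDetectOptions extends ScanOptions from @aiready/core, whose fields are not shown in this diff, so the rootDir and include options below are illustrative assumptions rather than confirmed API; minSimilarity and minLines are optional (defaults of 0.85 and 5 appear in dist/index.js further down).

import { analyzePatterns, generateSummary } from '@aiready/pattern-detect';

async function main() {
  // rootDir and include are assumed ScanOptions fields (not confirmed by this diff);
  // the cast reflects that the exact ScanOptions shape is unknown here.
  const results = await analyzePatterns({
    rootDir: './src',
    include: ['**/*.ts'],
    minSimilarity: 0.85,
    minLines: 5,
  } as any);

  const summary = generateSummary(results);
  console.log(`${summary.totalPatterns} duplicate patterns, ~${summary.totalTokenCost} tokens of duplication`);
  for (const dup of summary.topDuplicates) {
    console.log(`${dup.file1} <-> ${dup.file2}: ${Math.round(dup.similarity * 100)}% (${dup.patternType})`);
  }
}

main().catch(console.error);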
package/dist/index.js ADDED
@@ -0,0 +1,253 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var index_exports = {};
+ __export(index_exports, {
+ analyzePatterns: () => analyzePatterns,
+ detectDuplicatePatterns: () => detectDuplicatePatterns,
+ generateSummary: () => generateSummary
+ });
+ module.exports = __toCommonJS(index_exports);
+ var import_core2 = require("@aiready/core");
+
+ // src/detector.ts
+ var import_core = require("@aiready/core");
+ function categorizePattern(code) {
+ const lower = code.toLowerCase();
+ if (lower.includes("request") && lower.includes("response") || lower.includes("router.") || lower.includes("app.get") || lower.includes("app.post") || lower.includes("express") || lower.includes("ctx.body")) {
+ return "api-handler";
+ }
+ if (lower.includes("validate") || lower.includes("schema") || lower.includes("zod") || lower.includes("yup") || lower.includes("if") && lower.includes("throw")) {
+ return "validator";
+ }
+ if (lower.includes("return (") || lower.includes("jsx") || lower.includes("component") || lower.includes("props")) {
+ return "component";
+ }
+ if (lower.includes("class ") || lower.includes("this.")) {
+ return "class-method";
+ }
+ if (lower.includes("return ") && !lower.includes("this") && !lower.includes("new ")) {
+ return "utility";
+ }
+ if (lower.includes("function") || lower.includes("=>")) {
+ return "function";
+ }
+ return "unknown";
+ }
+ function extractCodeBlocks(content, minLines) {
+ const lines = content.split("\n");
+ const blocks = [];
+ let currentBlock = [];
+ let blockStart = 0;
+ let braceDepth = 0;
+ let inFunction = false;
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+ const trimmed = line.trim();
+ if (!inFunction && (trimmed.includes("function ") || trimmed.includes("=>") || trimmed.includes("async ") || /^(export\s+)?(async\s+)?function\s+/.test(trimmed) || /^(export\s+)?const\s+\w+\s*=\s*(async\s*)?\(/.test(trimmed))) {
+ inFunction = true;
+ blockStart = i;
+ }
+ for (const char of line) {
+ if (char === "{") braceDepth++;
+ if (char === "}") braceDepth--;
+ }
+ if (inFunction) {
+ currentBlock.push(line);
+ }
+ if (inFunction && braceDepth === 0 && currentBlock.length >= minLines) {
+ const blockContent = currentBlock.join("\n");
+ const linesOfCode = currentBlock.filter(
+ (l) => l.trim() && !l.trim().startsWith("//")
+ ).length;
+ blocks.push({
+ content: blockContent,
+ startLine: blockStart + 1,
+ patternType: categorizePattern(blockContent),
+ linesOfCode
+ });
+ currentBlock = [];
+ inFunction = false;
+ } else if (inFunction && braceDepth === 0) {
+ currentBlock = [];
+ inFunction = false;
+ }
+ }
+ return blocks;
+ }
+ function normalizeCode(code) {
+ return code.replace(/\/\/.*$/gm, "").replace(/\/\*[\s\S]*?\*\//g, "").replace(/"[^"]*"/g, '"STR"').replace(/'[^']*'/g, "'STR'").replace(/`[^`]*`/g, "`STR`").replace(/\b\d+\b/g, "NUM").replace(/\s+/g, " ").trim();
+ }
+ function calculateSimilarity(block1, block2) {
+ const norm1 = normalizeCode(block1);
+ const norm2 = normalizeCode(block2);
+ const baseSimilarity = (0, import_core.similarityScore)(norm1, norm2);
+ const tokens1 = norm1.split(/[\s(){}[\];,]+/).filter(Boolean);
+ const tokens2 = norm2.split(/[\s(){}[\];,]+/).filter(Boolean);
+ const tokenSimilarity = (0, import_core.similarityScore)(tokens1.join(" "), tokens2.join(" "));
+ return baseSimilarity * 0.4 + tokenSimilarity * 0.6;
+ }
+ function detectDuplicatePatterns(files, options) {
+ const { minSimilarity, minLines } = options;
+ const duplicates = [];
+ const allBlocks = files.flatMap(
+ (file) => extractCodeBlocks(file.content, minLines).map((block) => ({
+ ...block,
+ file: file.file,
+ normalized: normalizeCode(block.content),
+ tokenCost: (0, import_core.estimateTokens)(block.content)
+ }))
+ );
+ console.log(`Extracted ${allBlocks.length} code blocks for analysis`);
+ for (let i = 0; i < allBlocks.length; i++) {
+ for (let j = i + 1; j < allBlocks.length; j++) {
+ const block1 = allBlocks[i];
+ const block2 = allBlocks[j];
+ if (block1.file === block2.file) continue;
+ const similarity = calculateSimilarity(block1.content, block2.content);
+ if (similarity >= minSimilarity) {
+ duplicates.push({
+ file1: block1.file,
+ file2: block2.file,
+ line1: block1.startLine,
+ line2: block2.startLine,
+ similarity,
+ snippet: block1.content.split("\n").slice(0, 5).join("\n") + "\n...",
+ patternType: block1.patternType,
+ tokenCost: block1.tokenCost + block2.tokenCost,
+ linesOfCode: block1.linesOfCode
+ });
+ }
+ }
+ }
+ return duplicates.sort(
+ (a, b) => b.similarity - a.similarity || b.tokenCost - a.tokenCost
+ );
+ }
+
+ // src/index.ts
+ function getRefactoringSuggestion(patternType, similarity) {
+ const baseMessages = {
+ "api-handler": "Extract common middleware or create a base handler class",
+ validator: "Consolidate validation logic into shared schema validators (Zod/Yup)",
+ utility: "Move to a shared utilities file and reuse across modules",
+ "class-method": "Consider inheritance or composition to share behavior",
+ component: "Extract shared logic into a custom hook or HOC",
+ function: "Extract into a shared helper function",
+ unknown: "Extract common logic into a reusable module"
+ };
+ const urgency = similarity > 0.95 ? " (CRITICAL: Nearly identical code)" : similarity > 0.9 ? " (HIGH: Very similar, refactor soon)" : "";
+ return baseMessages[patternType] + urgency;
+ }
+ async function analyzePatterns(options) {
+ const { minSimilarity = 0.85, minLines = 5, ...scanOptions } = options;
+ const files = await (0, import_core2.scanFiles)(scanOptions);
+ const results = [];
+ const fileContents = await Promise.all(
+ files.map(async (file) => ({
+ file,
+ content: await (0, import_core2.readFileContent)(file)
+ }))
+ );
+ const duplicates = detectDuplicatePatterns(fileContents, {
+ minSimilarity,
+ minLines
+ });
+ for (const file of files) {
+ const fileDuplicates = duplicates.filter(
+ (dup) => dup.file1 === file || dup.file2 === file
+ );
+ const issues = fileDuplicates.map((dup) => {
+ const otherFile = dup.file1 === file ? dup.file2 : dup.file1;
+ const severity = dup.similarity > 0.95 ? "critical" : dup.similarity > 0.9 ? "major" : "minor";
+ return {
+ type: "duplicate-pattern",
+ severity,
+ message: `${dup.patternType} pattern ${Math.round(dup.similarity * 100)}% similar to ${otherFile} (${dup.tokenCost} tokens wasted)`,
+ location: {
+ file,
+ line: dup.file1 === file ? dup.line1 : dup.line2
+ },
+ suggestion: getRefactoringSuggestion(dup.patternType, dup.similarity)
+ };
+ });
+ const totalTokenCost = fileDuplicates.reduce(
+ (sum, dup) => sum + dup.tokenCost,
+ 0
+ );
+ results.push({
+ fileName: file,
+ issues,
+ metrics: {
+ tokenCost: totalTokenCost,
+ consistencyScore: Math.max(0, 1 - fileDuplicates.length * 0.1)
+ }
+ });
+ }
+ return results;
+ }
+ function generateSummary(results) {
+ const allIssues = results.flatMap((r) => r.issues);
+ const totalTokenCost = results.reduce(
+ (sum, r) => sum + (r.metrics.tokenCost || 0),
+ 0
+ );
+ const patternsByType = {
+ "api-handler": 0,
+ validator: 0,
+ utility: 0,
+ "class-method": 0,
+ component: 0,
+ function: 0,
+ unknown: 0
+ };
+ allIssues.forEach((issue) => {
+ const match = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
+ if (match) {
+ const type = match[1];
+ patternsByType[type] = (patternsByType[type] || 0) + 1;
+ }
+ });
+ const topDuplicates = allIssues.slice(0, 10).map((issue) => {
+ const similarityMatch = issue.message.match(/(\d+)% similar/);
+ const tokenMatch = issue.message.match(/\((\d+) tokens/);
+ const typeMatch = issue.message.match(/^(\S+(?:-\S+)*) pattern/);
+ const fileMatch = issue.message.match(/similar to (.+?) \(/);
+ return {
+ file1: issue.location.file,
+ file2: fileMatch?.[1] || "unknown",
+ similarity: similarityMatch ? parseInt(similarityMatch[1]) / 100 : 0,
+ patternType: typeMatch?.[1] || "unknown",
+ tokenCost: tokenMatch ? parseInt(tokenMatch[1]) : 0
+ };
+ });
+ return {
+ totalPatterns: allIssues.length,
+ totalTokenCost,
+ patternsByType,
+ topDuplicates
+ };
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ analyzePatterns,
+ detectDuplicatePatterns,
+ generateSummary
+ });
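The core heuristic in src/detector.ts normalizes each extracted block (comments stripped, string and numeric literals collapsed to STR/NUM, whitespace flattened), then blends a character-level similarity (weight 0.4) with a token-level similarity (weight 0.6). Both scores come from similarityScore in @aiready/core, which is not included in this diff; the standalone sketch below substitutes a simple bigram Dice coefficient as a stand-in, so absolute scores will differ from the package's.

// Standalone sketch of the normalize-then-blend step from detector.ts.
// similarityScore here is a bigram Dice-coefficient stand-in; the real
// implementation lives in @aiready/core and is not part of this diff.
function normalizeCode(code: string): string {
  return code
    .replace(/\/\/.*$/gm, '')           // drop line comments
    .replace(/\/\*[\s\S]*?\*\//g, '')   // drop block comments
    .replace(/"[^"]*"/g, '"STR"')       // collapse string literals
    .replace(/'[^']*'/g, "'STR'")
    .replace(/`[^`]*`/g, '`STR`')
    .replace(/\b\d+\b/g, 'NUM')         // collapse numeric literals
    .replace(/\s+/g, ' ')
    .trim();
}

function similarityScore(a: string, b: string): number {
  // Dice coefficient over character bigrams (assumed stand-in).
  const bigrams = (s: string) => {
    const counts = new Map<string, number>();
    for (let i = 0; i < s.length - 1; i++) {
      const bg = s.slice(i, i + 2);
      counts.set(bg, (counts.get(bg) ?? 0) + 1);
    }
    return counts;
  };
  const aBi = bigrams(a);
  const bBi = bigrams(b);
  let overlap = 0;
  for (const [bg, count] of aBi) overlap += Math.min(count, bBi.get(bg) ?? 0);
  const total = Math.max(a.length - 1, 0) + Math.max(b.length - 1, 0);
  return total > 0 ? (2 * overlap) / total : 0;
}

function calculateSimilarity(block1: string, block2: string): number {
  const norm1 = normalizeCode(block1);
  const norm2 = normalizeCode(block2);
  const base = similarityScore(norm1, norm2);
  const tokens1 = norm1.split(/[\s(){}[\];,]+/).filter(Boolean).join(' ');
  const tokens2 = norm2.split(/[\s(){}[\];,]+/).filter(Boolean).join(' ');
  const tokenSim = similarityScore(tokens1, tokens2);
  return base * 0.4 + tokenSim * 0.6;   // same 40/60 blend as the package
}

// Blocks that differ only in identifiers and literals score close to 1:
console.log(calculateSimilarity(
  'function add(a, b) { return a + 1; }',
  'function sum(x, y) { return x + 2; }'
));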
package/dist/index.mjs ADDED
@@ -0,0 +1,10 @@
+ import {
+ analyzePatterns,
+ detectDuplicatePatterns,
+ generateSummary
+ } from "./chunk-RLWJXASG.mjs";
+ export {
+ analyzePatterns,
+ detectDuplicatePatterns,
+ generateSummary
+ };
package/package.json ADDED
@@ -0,0 +1,72 @@
+ {
+ "name": "@aiready/pattern-detect",
+ "version": "0.1.0",
+ "description": "Semantic duplicate pattern detection for AI-generated code - finds similar implementations that waste AI context tokens",
+ "main": "./dist/index.js",
+ "module": "./dist/index.mjs",
+ "types": "./dist/index.d.ts",
+ "bin": {
+ "aiready-patterns": "./dist/cli.js"
+ },
+ "exports": {
+ ".": {
+ "types": "./dist/index.d.ts",
+ "require": "./dist/index.js",
+ "import": "./dist/index.mjs"
+ }
+ },
+ "keywords": [
+ "aiready",
+ "duplicate-detection",
+ "semantic-analysis",
+ "code-patterns",
+ "ai-code",
+ "tech-debt",
+ "code-quality",
+ "refactoring",
+ "copilot",
+ "chatgpt",
+ "claude",
+ "ai-assisted-development",
+ "pattern-recognition",
+ "code-similarity",
+ "token-optimization"
+ ],
+ "author": "AIReady Team",
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/caopengau/aiready-pattern-detect.git"
+ },
+ "homepage": "https://github.com/caopengau/aiready-pattern-detect",
+ "bugs": {
+ "url": "https://github.com/caopengau/aiready-pattern-detect/issues"
+ },
+ "dependencies": {
+ "commander": "^12.1.0",
+ "chalk": "^5.3.0",
+ "@aiready/core": "0.1.0"
+ },
+ "devDependencies": {
+ "tsup": "^8.3.5",
+ "eslint": "^9.17.0"
+ },
+ "files": [
+ "dist",
+ "README.md",
+ "LICENSE"
+ ],
+ "engines": {
+ "node": ">=18.0.0"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "scripts": {
+ "build": "tsup src/index.ts src/cli.ts --format cjs,esm --dts",
+ "dev": "tsup src/index.ts src/cli.ts --format cjs,esm --dts --watch",
+ "test": "vitest run",
+ "lint": "eslint src",
+ "clean": "rm -rf dist"
+ }
+ }
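The exports map above routes the "require" condition to dist/index.js (the CommonJS bundle shown earlier) and the "import" condition to dist/index.mjs, with types served from dist/index.d.ts, so both module systems resolve the same three functions. The "bin" entry additionally installs an aiready-patterns command backed by dist/cli.js, which is not included in this diff. A minimal consumption sketch:

// ESM consumers resolve the "import" condition (dist/index.mjs):
import { analyzePatterns, detectDuplicatePatterns, generateSummary } from '@aiready/pattern-detect';

// CommonJS consumers resolve the "require" condition (dist/index.js):
// const { analyzePatterns } = require('@aiready/pattern-detect');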