@aiready/pattern-detect 0.16.18 → 0.16.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -1,242 +1,14 @@
- import { ToolProvider, Severity, FileContent, ScanOptions, AnalysisResult, CostConfig, ToolScoringOutput } from '@aiready/core';
+ import { ToolProvider } from '@aiready/core';
  export { Severity } from '@aiready/core';
+ export { D as DuplicateGroup, P as PatternDetectOptions, a as PatternSummary, R as RefactorCluster, b as analyzePatterns, c as createRefactorClusters, f as filterClustersByImpact, g as generateSummary, d as getSmartDefaults, e as groupDuplicatesByFilePair } from './analyzer-entry-BwuoiCNm.js';
+ export { detectDuplicatePatterns } from './detector-entry.js';
+ export { calculatePatternScore } from './scoring-entry.js';
+ export { C as CONTEXT_RULES, a as ContextRule, c as calculateSeverity, f as filterBySeverity, g as getSeverityLabel, b as getSeverityThreshold } from './context-rules-entry-y2uJSngh.js';
+ export { D as DuplicatePattern, P as PatternType } from './types-DU2mmhwb.js';

  /**
   * Pattern Detection Tool Provider
   */
  declare const PatternDetectProvider: ToolProvider;

- type PatternType = 'api-handler' | 'validator' | 'utility' | 'class-method' | 'component' | 'function' | 'unknown';
- interface DuplicatePattern {
-   file1: string;
-   line1: number;
-   endLine1: number;
-   file2: string;
-   line2: number;
-   endLine2: number;
-   code1: string;
-   code2: string;
-   similarity: number;
-   confidence: number;
-   patternType: PatternType;
-   tokenCost: number;
-   severity: Severity;
-   reason?: string;
-   suggestion?: string;
-   matchedRule?: string;
- }
- interface DetectionOptions {
-   minSimilarity: number;
-   minLines: number;
-   batchSize: number;
-   approx: boolean;
-   minSharedTokens: number;
-   maxCandidatesPerBlock: number;
-   streamResults: boolean;
-   excludePatterns?: string[];
-   confidenceThreshold?: number;
-   ignoreWhitelist?: string[];
-   onProgress?: (processed: number, total: number, message: string) => void;
- }
-
- /**
-  * Detect duplicate patterns across files
-  *
-  * @param fileContents - Array of file contents to analyze.
-  * @param options - Configuration for duplicate detection (thresholds, progress, etc).
-  * @returns Promise resolving to an array of detected duplicate patterns sorted by similarity.
-  */
- declare function detectDuplicatePatterns(fileContents: FileContent[], options: DetectionOptions): Promise<DuplicatePattern[]>;
-
- interface DuplicateGroup {
-   filePair: string;
-   severity: Severity;
-   occurrences: number;
-   totalTokenCost: number;
-   averageSimilarity: number;
-   patternTypes: Set<PatternType>;
-   lineRanges: Array<{
-     file1: {
-       start: number;
-       end: number;
-     };
-     file2: {
-       start: number;
-       end: number;
-     };
-   }>;
- }
- interface RefactorCluster {
-   id: string;
-   name: string;
-   files: string[];
-   severity: Severity;
-   duplicateCount: number;
-   totalTokenCost: number;
-   averageSimilarity: number;
-   reason?: string;
-   suggestion?: string;
- }
- /**
-  * Group raw duplicates by file pairs to reduce noise
-  */
- declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): DuplicateGroup[];
- /**
-  * Create clusters of highly related files (refactor targets)
-  * Uses a simple connected components algorithm
-  * @param duplicates - Array of duplicate patterns to cluster
-  * @returns Array of refactor clusters
-  */
- declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
- /**
-  * Filter clusters by impact threshold
-  * @param clusters - Array of refactor clusters to filter
-  * @param minTokenCost - Minimum token cost threshold (default: 1000)
-  * @param minFiles - Minimum number of files in cluster (default: 3)
-  * @returns Filtered array of refactor clusters
-  */
- declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
-
- interface PatternDetectOptions extends ScanOptions {
-   minSimilarity?: number;
-   minLines?: number;
-   batchSize?: number;
-   approx?: boolean;
-   minSharedTokens?: number;
-   maxCandidatesPerBlock?: number;
-   streamResults?: boolean;
-   severity?: string;
-   includeTests?: boolean;
-   useSmartDefaults?: boolean;
-   groupByFilePair?: boolean;
-   createClusters?: boolean;
-   minClusterTokenCost?: number;
-   minClusterFiles?: number;
-   excludePatterns?: string[];
-   confidenceThreshold?: number;
-   ignoreWhitelist?: string[];
-   onProgress?: (processed: number, total: number, message: string) => void;
- }
- interface PatternSummary {
-   totalPatterns: number;
-   totalTokenCost: number;
-   patternsByType: Record<PatternType, number>;
-   topDuplicates: Array<{
-     files: Array<{
-       path: string;
-       startLine: number;
-       endLine: number;
-     }>;
-     similarity: number;
-     patternType: PatternType;
-     tokenCost: number;
-   }>;
- }
- /**
-  * Determine smart defaults based on repository size estimation.
-  *
-  * @param directory - The directory to analyze for size.
-  * @param userOptions - User-provided option overrides.
-  * @returns Promise resolving to optimal detection options.
-  */
- declare function getSmartDefaults(directory: string, userOptions: Partial<PatternDetectOptions>): Promise<PatternDetectOptions>;
- /**
-  * Main entry point for pattern detection analysis.
-  *
-  * @param options - Configuration including rootDir and detection parameters.
-  * @returns Promise resolving to the comprehensive pattern detect report.
-  * @lastUpdated 2026-03-18
-  */
- declare function analyzePatterns(options: PatternDetectOptions): Promise<{
-   results: AnalysisResult[];
-   duplicates: DuplicatePattern[];
-   files: string[];
-   groups?: DuplicateGroup[];
-   clusters?: RefactorCluster[];
-   config: PatternDetectOptions;
- }>;
- /**
-  * Generate a summary of pattern detection results.
-  *
-  * @param results - Array of file-level analysis results.
-  * @returns Consolidated pattern summary object.
-  */
- declare function generateSummary(results: AnalysisResult[]): PatternSummary;
-
- /**
-  * Calculate AI Readiness Score for pattern duplication (0-100)
-  *
-  * Based on:
-  * - Number of duplicates per file
-  * - Token waste per file
-  * - High-impact duplicates (>1000 tokens or >70% similarity)
-  *
-  * Includes business value metrics:
-  * - Estimated monthly cost of token waste
-  * - Estimated developer hours to fix
-  *
-  * @param duplicates - Array of detected duplicate patterns.
-  * @param totalFilesAnalyzed - Total count of files scanned.
-  * @param costConfig - Optional configuration for business value calculations.
-  * @returns Standardized scoring output for pattern detection.
-  */
- declare function calculatePatternScore(duplicates: DuplicatePattern[], totalFilesAnalyzed: number, costConfig?: Partial<CostConfig>): ToolScoringOutput;
-
- /**
-  * Context-aware severity detection for duplicate patterns
-  * Identifies intentional duplication patterns and adjusts severity accordingly
-  */
- interface ContextRule {
-   name: string;
-   detect: (file: string, code: string) => boolean;
-   severity: Severity;
-   reason: string;
-   suggestion?: string;
- }
- /**
-  * Context rules for detecting intentional or acceptable duplication patterns
-  * Rules are checked in order - first match wins
-  */
- declare const CONTEXT_RULES: ContextRule[];
- /**
-  * Calculate severity based on context rules and code characteristics
-  *
-  * @param file1 - First file path in the duplicate pair.
-  * @param file2 - Second file path in the duplicate pair.
-  * @param code - Snippet of the duplicated code.
-  * @param similarity - The calculated similarity score (0-1).
-  * @param linesOfCode - Number of lines in the duplicated block.
-  * @returns An object containing the severity level and reasoning.
-  */
- declare function calculateSeverity(file1: string, file2: string, code: string, similarity: number, linesOfCode: number): {
-   severity: Severity;
-   reason?: string;
-   suggestion?: string;
-   matchedRule?: string;
- };
- /**
-  * Get a human-readable severity label with emoji
-  *
-  * @param severity - The severity level to label.
-  * @returns Formatted label string for UI display.
-  */
- declare function getSeverityLabel(severity: Severity): string;
- /**
-  * Filter duplicates by minimum severity threshold
-  *
-  * @param duplicates - List of items with a severity property.
-  * @param minSeverity - Minimum threshold for inclusion.
-  * @returns Filtered list of items.
-  */
- declare function filterBySeverity<T extends {
-   severity: Severity;
- }>(duplicates: T[], minSeverity: Severity): T[];
- /**
-  * Get numerical similarity threshold associated with a severity level
-  *
-  * @param severity - The severity level to look up.
-  * @returns Minimum similarity value for this severity.
-  */
- declare function getSeverityThreshold(severity: Severity): number;
-
- export { CONTEXT_RULES, type ContextRule, type DuplicateGroup, type DuplicatePattern, type PatternDetectOptions, PatternDetectProvider, type PatternSummary, type PatternType, type RefactorCluster, analyzePatterns, calculatePatternScore, calculateSeverity, createRefactorClusters, detectDuplicatePatterns, filterBySeverity, filterClustersByImpact, generateSummary, getSeverityLabel, getSeverityThreshold, getSmartDefaults, groupDuplicatesByFilePair };
+ export { PatternDetectProvider };
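The net effect of this hunk: index.d.ts shrinks from inline declarations to re-exports from per-concern declaration files, while the root entry keeps the same named surface. A minimal consumer sketch, assuming rootDir comes from ScanOptions as the analyzePatterns JSDoc suggests (the values and logging are illustrative, not from the package):

    import { analyzePatterns, type PatternDetectOptions } from '@aiready/pattern-detect';

    const options: PatternDetectOptions = { rootDir: './src', minSimilarity: 0.85 };
    const report = await analyzePatterns(options);
    console.log(`${report.duplicates.length} duplicates across ${report.files.length} files`);

Existing imports against the root entry should therefore compile unchanged on 0.16.19.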
package/dist/index.mjs CHANGED
@@ -1,20 +1,28 @@
  import {
-   CONTEXT_RULES,
    PatternDetectProvider,
-   Severity,
+   Severity
+ } from "./chunk-UB3CGOQ7.mjs";
+ import {
    analyzePatterns,
-   calculatePatternScore,
-   calculateSeverity,
    createRefactorClusters,
-   detectDuplicatePatterns,
-   filterBySeverity,
    filterClustersByImpact,
    generateSummary,
-   getSeverityLabel,
-   getSeverityThreshold,
    getSmartDefaults,
    groupDuplicatesByFilePair
- } from "./chunk-BUBQ3W6W.mjs";
+ } from "./chunk-WMOGJFME.mjs";
+ import {
+   detectDuplicatePatterns
+ } from "./chunk-THF4RW63.mjs";
+ import {
+   CONTEXT_RULES,
+   calculateSeverity,
+   filterBySeverity,
+   getSeverityLabel,
+   getSeverityThreshold
+ } from "./chunk-I6ETJC7L.mjs";
+ import {
+   calculatePatternScore
+ } from "./chunk-WBBO35SC.mjs";
  export {
    CONTEXT_RULES,
    PatternDetectProvider,
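Where the old ESM bundle pulled everything from a single chunk (chunk-BUBQ3W6W.mjs), the new one splits the code across five chunks, one per concern. That split is what backs the subpath exports added to package.json further down: consumers can load a single slice without the rest of the tool. A sketch of the deep imports those entries enable, assuming only the entry names declared there:

    import { calculatePatternScore } from '@aiready/pattern-detect/scoring';
    import { CONTEXT_RULES, filterBySeverity } from '@aiready/pattern-detect/context-rules';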
package/dist/scoring-entry.d.mts ADDED
@@ -0,0 +1,23 @@
+ import { CostConfig, ToolScoringOutput } from '@aiready/core';
+ import { D as DuplicatePattern } from './types-DU2mmhwb.mjs';
+
+ /**
+  * Calculate AI Readiness Score for pattern duplication (0-100)
+  *
+  * Based on:
+  * - Number of duplicates per file
+  * - Token waste per file
+  * - High-impact duplicates (>1000 tokens or >70% similarity)
+  *
+  * Includes business value metrics:
+  * - Estimated monthly cost of token waste
+  * - Estimated developer hours to fix
+  *
+  * @param duplicates - Array of detected duplicate patterns.
+  * @param totalFilesAnalyzed - Total count of files scanned.
+  * @param costConfig - Optional configuration for business value calculations.
+  * @returns Standardized scoring output for pattern detection.
+  */
+ declare function calculatePatternScore(duplicates: DuplicatePattern[], totalFilesAnalyzed: number, costConfig?: Partial<CostConfig>): ToolScoringOutput;
+
+ export { calculatePatternScore };
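This is the same calculatePatternScore signature the root entry re-exports; only its home moved, and an identical declaration for the CommonJS flavor follows. A usage sketch, assuming the ./scoring subpath added in package.json further down:

    import { calculatePatternScore } from '@aiready/pattern-detect/scoring';
    import type { DuplicatePattern } from '@aiready/pattern-detect';

    declare const duplicates: DuplicatePattern[]; // e.g. from detectDuplicatePatterns
    const output = calculatePatternScore(duplicates, 50);
    console.log(output.score, output.recommendations);

    // With nothing analyzed, the scorer short-circuits to a perfect score,
    // as the implementation later in this diff shows:
    const baseline = calculatePatternScore([], 0); // baseline.score === 100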
package/dist/scoring-entry.d.ts ADDED
@@ -0,0 +1,23 @@
+ import { CostConfig, ToolScoringOutput } from '@aiready/core';
+ import { D as DuplicatePattern } from './types-DU2mmhwb.js';
+
+ /**
+  * Calculate AI Readiness Score for pattern duplication (0-100)
+  *
+  * Based on:
+  * - Number of duplicates per file
+  * - Token waste per file
+  * - High-impact duplicates (>1000 tokens or >70% similarity)
+  *
+  * Includes business value metrics:
+  * - Estimated monthly cost of token waste
+  * - Estimated developer hours to fix
+  *
+  * @param duplicates - Array of detected duplicate patterns.
+  * @param totalFilesAnalyzed - Total count of files scanned.
+  * @param costConfig - Optional configuration for business value calculations.
+  * @returns Standardized scoring output for pattern detection.
+  */
+ declare function calculatePatternScore(duplicates: DuplicatePattern[], totalFilesAnalyzed: number, costConfig?: Partial<CostConfig>): ToolScoringOutput;
+
+ export { calculatePatternScore };
package/dist/scoring-entry.js ADDED
@@ -0,0 +1,133 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/scoring-entry.ts
+ var scoring_entry_exports = {};
+ __export(scoring_entry_exports, {
+   calculatePatternScore: () => calculatePatternScore
+ });
+ module.exports = __toCommonJS(scoring_entry_exports);
+
+ // src/scoring.ts
+ var import_core = require("@aiready/core");
+ function calculatePatternScore(duplicates, totalFilesAnalyzed, costConfig) {
+   const totalDuplicates = duplicates.length;
+   const totalTokenCost = duplicates.reduce((sum, d) => sum + d.tokenCost, 0);
+   const highImpactDuplicates = duplicates.filter(
+     (d) => d.tokenCost > 1e3 || d.similarity > 0.7
+   ).length;
+   if (totalFilesAnalyzed === 0) {
+     return {
+       toolName: import_core.ToolName.PatternDetect,
+       score: 100,
+       rawMetrics: {
+         totalDuplicates: 0,
+         totalTokenCost: 0,
+         highImpactDuplicates: 0,
+         totalFilesAnalyzed: 0
+       },
+       factors: [],
+       recommendations: []
+     };
+   }
+   const duplicatesPerFile = totalDuplicates / totalFilesAnalyzed * 100;
+   const tokenWastePerFile = totalTokenCost / totalFilesAnalyzed;
+   const duplicatesPenalty = Math.min(60, duplicatesPerFile * 0.6);
+   const tokenPenalty = Math.min(40, tokenWastePerFile / 125);
+   const highImpactPenalty = highImpactDuplicates > 0 ? Math.min(15, highImpactDuplicates * 2 - 5) : -5;
+   const score = 100 - duplicatesPenalty - tokenPenalty - highImpactPenalty;
+   const finalScore = Math.max(0, Math.min(100, Math.round(score)));
+   const factors = [
+     {
+       name: "Duplication Density",
+       impact: -Math.round(duplicatesPenalty),
+       description: `${duplicatesPerFile.toFixed(1)} duplicates per 100 files`
+     },
+     {
+       name: "Token Waste",
+       impact: -Math.round(tokenPenalty),
+       description: `${Math.round(tokenWastePerFile)} tokens wasted per file`
+     }
+   ];
+   if (highImpactDuplicates > 0) {
+     factors.push({
+       name: "High-Impact Patterns",
+       impact: -Math.round(highImpactPenalty),
+       description: `${highImpactDuplicates} high-impact duplicates (>1000 tokens or >70% similar)`
+     });
+   } else {
+     factors.push({
+       name: "No High-Impact Patterns",
+       impact: 5,
+       description: "No severe duplicates detected"
+     });
+   }
+   const recommendations = [];
+   if (highImpactDuplicates > 0) {
+     const estimatedImpact = Math.min(15, highImpactDuplicates * 3);
+     recommendations.push({
+       action: `Deduplicate ${highImpactDuplicates} high-impact pattern${highImpactDuplicates > 1 ? "s" : ""}`,
+       estimatedImpact,
+       priority: "high"
+     });
+   }
+   if (totalDuplicates > 10 && duplicatesPerFile > 20) {
+     const estimatedImpact = Math.min(10, Math.round(duplicatesPenalty * 0.3));
+     recommendations.push({
+       action: "Extract common patterns into shared utilities",
+       estimatedImpact,
+       priority: "medium"
+     });
+   }
+   if (tokenWastePerFile > 2e3) {
+     const estimatedImpact = Math.min(8, Math.round(tokenPenalty * 0.4));
+     recommendations.push({
+       action: "Consolidate duplicated logic to reduce AI context waste",
+       estimatedImpact,
+       priority: totalTokenCost > 1e4 ? "high" : "medium"
+     });
+   }
+   const cfg = { ...import_core.DEFAULT_COST_CONFIG, ...costConfig };
+   const estimatedMonthlyCost = (0, import_core.calculateMonthlyCost)(totalTokenCost, cfg);
+   const issues = duplicates.map((d) => ({
+     severity: d.severity === "critical" ? "critical" : d.severity === "major" ? "major" : "minor"
+   }));
+   const productivityImpact = (0, import_core.calculateProductivityImpact)(issues);
+   return {
+     toolName: "pattern-detect",
+     score: finalScore,
+     rawMetrics: {
+       totalDuplicates,
+       totalTokenCost,
+       highImpactDuplicates,
+       totalFilesAnalyzed,
+       duplicatesPerFile: Math.round(duplicatesPerFile * 10) / 10,
+       tokenWastePerFile: Math.round(tokenWastePerFile),
+       // Business value metrics
+       estimatedMonthlyCost,
+       estimatedDeveloperHours: productivityImpact.totalHours
+     },
+     factors,
+     recommendations
+   };
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   calculatePatternScore
+ });
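To make the weighting above concrete, here is the arithmetic traced for sample inputs (numbers invented for illustration):

    // 50 duplicates across 200 files, 100 000 wasted tokens, 4 high-impact duplicates:
    // duplicatesPerFile = 50 / 200 * 100      = 25
    // duplicatesPenalty = min(60, 25 * 0.6)   = 15
    // tokenWastePerFile = 100000 / 200        = 500
    // tokenPenalty      = min(40, 500 / 125)  = 4
    // highImpactPenalty = min(15, 4 * 2 - 5)  = 3
    // score             = 100 - 15 - 4 - 3    = 78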
package/dist/scoring-entry.mjs ADDED
@@ -0,0 +1,6 @@
+ import {
+   calculatePatternScore
+ } from "./chunk-WBBO35SC.mjs";
+ export {
+   calculatePatternScore
+ };
package/dist/types-DU2mmhwb.d.mts ADDED
@@ -0,0 +1,36 @@
+ import { Severity } from '@aiready/core';
+
+ type PatternType = 'api-handler' | 'validator' | 'utility' | 'class-method' | 'component' | 'function' | 'unknown';
+ interface DuplicatePattern {
+   file1: string;
+   line1: number;
+   endLine1: number;
+   file2: string;
+   line2: number;
+   endLine2: number;
+   code1: string;
+   code2: string;
+   similarity: number;
+   confidence: number;
+   patternType: PatternType;
+   tokenCost: number;
+   severity: Severity;
+   reason?: string;
+   suggestion?: string;
+   matchedRule?: string;
+ }
+ interface DetectionOptions {
+   minSimilarity: number;
+   minLines: number;
+   batchSize: number;
+   approx: boolean;
+   minSharedTokens: number;
+   maxCandidatesPerBlock: number;
+   streamResults: boolean;
+   excludePatterns?: string[];
+   confidenceThreshold?: number;
+   ignoreWhitelist?: string[];
+   onProgress?: (processed: number, total: number, message: string) => void;
+ }
+
+ export type { DuplicatePattern as D, PatternType as P, DetectionOptions as a };
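These are the same PatternType, DuplicatePattern, and DetectionOptions shapes previously declared inline in index.d.ts, now emitted once per module flavor and re-exported under single-letter aliases (D, P, a); the identical hunk that follows is the CommonJS twin. A sketch of a call wired against these types; whether DetectionOptions itself is surfaced by a public entry is not visible in this diff, so the options object is written out structurally with illustrative values:

    import { detectDuplicatePatterns } from '@aiready/pattern-detect/detector';
    import type { FileContent } from '@aiready/core';

    declare const files: FileContent[];
    const duplicates = await detectDuplicatePatterns(files, {
      minSimilarity: 0.8,
      minLines: 5,
      batchSize: 100,
      approx: true,
      minSharedTokens: 20,
      maxCandidatesPerBlock: 50,
      streamResults: false,
      onProgress: (processed, total, message) =>
        console.log(`${processed}/${total} ${message}`),
    });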
package/dist/types-DU2mmhwb.d.ts ADDED
@@ -0,0 +1,36 @@
+ import { Severity } from '@aiready/core';
+
+ type PatternType = 'api-handler' | 'validator' | 'utility' | 'class-method' | 'component' | 'function' | 'unknown';
+ interface DuplicatePattern {
+   file1: string;
+   line1: number;
+   endLine1: number;
+   file2: string;
+   line2: number;
+   endLine2: number;
+   code1: string;
+   code2: string;
+   similarity: number;
+   confidence: number;
+   patternType: PatternType;
+   tokenCost: number;
+   severity: Severity;
+   reason?: string;
+   suggestion?: string;
+   matchedRule?: string;
+ }
+ interface DetectionOptions {
+   minSimilarity: number;
+   minLines: number;
+   batchSize: number;
+   approx: boolean;
+   minSharedTokens: number;
+   maxCandidatesPerBlock: number;
+   streamResults: boolean;
+   excludePatterns?: string[];
+   confidenceThreshold?: number;
+   ignoreWhitelist?: string[];
+   onProgress?: (processed: number, total: number, message: string) => void;
+ }
+
+ export type { DuplicatePattern as D, PatternType as P, DetectionOptions as a };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@aiready/pattern-detect",
-   "version": "0.16.18",
+   "version": "0.16.19",
    "description": "Semantic duplicate pattern detection for AI-generated code - finds similar implementations that waste AI context tokens",
    "main": "./dist/index.js",
    "module": "./dist/index.mjs",
@@ -13,6 +13,26 @@
        "types": "./dist/index.d.ts",
        "require": "./dist/index.js",
        "import": "./dist/index.mjs"
+     },
+     "./analyzer": {
+       "types": "./dist/analyzer-entry.d.ts",
+       "require": "./dist/analyzer-entry.js",
+       "import": "./dist/analyzer-entry.mjs"
+     },
+     "./detector": {
+       "types": "./dist/detector-entry.d.ts",
+       "require": "./dist/detector-entry.js",
+       "import": "./dist/detector-entry.mjs"
+     },
+     "./scoring": {
+       "types": "./dist/scoring-entry.d.ts",
+       "require": "./dist/scoring-entry.js",
+       "import": "./dist/scoring-entry.mjs"
+     },
+     "./context-rules": {
+       "types": "./dist/context-rules-entry.d.ts",
+       "require": "./dist/context-rules-entry.js",
+       "import": "./dist/context-rules-entry.mjs"
      }
    },
    "keywords": [
@@ -45,7 +65,7 @@
    "dependencies": {
      "commander": "^14.0.0",
      "chalk": "^5.3.0",
-     "@aiready/core": "0.23.19"
+     "@aiready/core": "0.23.20"
    },
    "devDependencies": {
      "tsup": "^8.3.5",
@@ -64,8 +84,8 @@
      "access": "public"
    },
    "scripts": {
-     "build": "tsup src/index.ts src/cli.ts --format cjs,esm --dts",
-     "dev": "tsup src/index.ts src/cli.ts --format cjs,esm --dts --watch",
+     "build": "tsup src/index.ts src/cli.ts src/analyzer-entry.ts src/detector-entry.ts src/scoring-entry.ts src/context-rules-entry.ts --format cjs,esm --dts",
+     "dev": "tsup src/index.ts src/cli.ts src/analyzer-entry.ts src/detector-entry.ts src/scoring-entry.ts src/context-rules-entry.ts --format cjs,esm --dts --watch",
      "test": "vitest run",
      "lint": "eslint src",
      "clean": "rm -rf dist",