@aiready/pattern-detect 0.17.15 → 0.17.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dist/analyzer-entry/index.d.mts +2 -2
  2. package/dist/analyzer-entry/index.d.ts +2 -2
  3. package/dist/analyzer-entry/index.js +357 -140
  4. package/dist/analyzer-entry/index.mjs +4 -4
  5. package/dist/chunk-3LMYFYWG.mjs +514 -0
  6. package/dist/chunk-4YXKUW4P.mjs +143 -0
  7. package/dist/chunk-5A3ULAQ5.mjs +571 -0
  8. package/dist/chunk-5FACKJ7M.mjs +519 -0
  9. package/dist/chunk-6B72OWZA.mjs +143 -0
  10. package/dist/chunk-6SHBBRHF.mjs +600 -0
  11. package/dist/chunk-BKSIA7A2.mjs +516 -0
  12. package/dist/chunk-CM5YJR7G.mjs +516 -0
  13. package/dist/chunk-FSXOU23F.mjs +620 -0
  14. package/dist/chunk-GUYQI3AF.mjs +514 -0
  15. package/dist/chunk-H2TGXGMX.mjs +587 -0
  16. package/dist/chunk-KMAOEVRS.mjs +150 -0
  17. package/dist/chunk-NWG2ZIGX.mjs +146 -0
  18. package/dist/chunk-OFVJFGQW.mjs +514 -0
  19. package/dist/chunk-PCCZREHY.mjs +143 -0
  20. package/dist/chunk-PQS5ACTN.mjs +516 -0
  21. package/dist/chunk-TVE75IDM.mjs +143 -0
  22. package/dist/chunk-UDOGQ42Q.mjs +603 -0
  23. package/dist/chunk-UFI4UDQI.mjs +514 -0
  24. package/dist/chunk-UXV57HN3.mjs +144 -0
  25. package/dist/chunk-VC2BOV6R.mjs +143 -0
  26. package/dist/chunk-VI2OVG73.mjs +514 -0
  27. package/dist/chunk-VKGYNHFY.mjs +514 -0
  28. package/dist/chunk-WBLZYAQ2.mjs +518 -0
  29. package/dist/chunk-WFVXMMB3.mjs +143 -0
  30. package/dist/chunk-WQC43BIO.mjs +516 -0
  31. package/dist/chunk-WTAIM3SG.mjs +146 -0
  32. package/dist/chunk-XC7U55PE.mjs +514 -0
  33. package/dist/chunk-XR373Q6G.mjs +146 -0
  34. package/dist/chunk-XWIBTD67.mjs +620 -0
  35. package/dist/chunk-YUQ2VQVJ.mjs +514 -0
  36. package/dist/chunk-Z4NOH52X.mjs +143 -0
  37. package/dist/cli.js +357 -140
  38. package/dist/cli.mjs +4 -4
  39. package/dist/context-rules-entry/index.js +351 -139
  40. package/dist/context-rules-entry/index.mjs +1 -1
  41. package/dist/detector-entry/index.d.mts +2 -2
  42. package/dist/detector-entry/index.d.ts +2 -2
  43. package/dist/detector-entry/index.js +355 -140
  44. package/dist/detector-entry/index.mjs +2 -2
  45. package/dist/index-BGvkJ9j1.d.mts +136 -0
  46. package/dist/index-BJq32qmj.d.mts +137 -0
  47. package/dist/index-BpoJSgX-.d.mts +136 -0
  48. package/dist/index-C7qLPKmH.d.ts +150 -0
  49. package/dist/index-CThnG9hv.d.ts +155 -0
  50. package/dist/index-D0Hpg9nN.d.mts +150 -0
  51. package/dist/index-DN6XpBOW.d.mts +155 -0
  52. package/dist/index-F8xqZ2PS.d.ts +136 -0
  53. package/dist/index-HNhDr6CV.d.ts +137 -0
  54. package/dist/index-ux0Wo8Ps.d.ts +136 -0
  55. package/dist/index.d.mts +2 -2
  56. package/dist/index.d.ts +2 -2
  57. package/dist/index.js +359 -142
  58. package/dist/index.mjs +4 -4
  59. package/dist/scoring-entry/index.d.mts +1 -1
  60. package/dist/scoring-entry/index.d.ts +1 -1
  61. package/dist/scoring-entry/index.js +2 -2
  62. package/dist/scoring-entry/index.mjs +1 -1
  63. package/dist/types-tgrmUrHE.d.mts +37 -0
  64. package/dist/types-tgrmUrHE.d.ts +37 -0
  65. package/package.json +5 -3
@@ -0,0 +1,136 @@
1
+ import { Severity, ScanOptions, Issue, AnalysisResult } from '@aiready/core';
2
+ import { P as PatternType, D as DuplicatePattern } from './types-tgrmUrHE.mjs';
3
+
4
+ interface DuplicateGroup {
5
+ filePair: string;
6
+ severity: Severity;
7
+ occurrences: number;
8
+ totalTokenCost: number;
9
+ averageSimilarity: number;
10
+ patternTypes: Set<PatternType>;
11
+ lineRanges: Array<{
12
+ file1: {
13
+ start: number;
14
+ end: number;
15
+ };
16
+ file2: {
17
+ start: number;
18
+ end: number;
19
+ };
20
+ }>;
21
+ }
22
+ interface RefactorCluster {
23
+ id: string;
24
+ name: string;
25
+ files: string[];
26
+ severity: Severity;
27
+ duplicateCount: number;
28
+ totalTokenCost: number;
29
+ averageSimilarity: number;
30
+ reason?: string;
31
+ suggestion?: string;
32
+ }
33
+ /**
34
+ * Group raw duplicates by file pairs to reduce noise
35
+ */
36
+ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): DuplicateGroup[];
37
+ /**
38
+ * Create clusters of highly related files (refactor targets)
39
+ * Uses a simple connected components algorithm
40
+ * @param duplicates - Array of duplicate patterns to cluster
41
+ * @returns Array of refactor clusters
42
+ */
43
+ declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
44
+ /**
45
+ * Filter clusters by impact threshold
46
+ * @param clusters - Array of refactor clusters to filter
47
+ * @param minTokenCost - Minimum token cost threshold (default: 1000)
48
+ * @param minFiles - Minimum number of files in cluster (default: 3)
49
+ * @returns Filtered array of refactor clusters
50
+ */
51
+ declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
52
+ /**
53
+ * Detect if two duplicate files are likely brand-specific variants
54
+ * (e.g., different themed versions of the same UI component)
55
+ */
56
+ declare function areBrandSpecificVariants(file1: string, file2: string, code1: string, code2: string): boolean;
57
+ /**
58
+ * Filter out brand-specific variants from duplicates
59
+ */
60
+ declare function filterBrandSpecificVariants(duplicates: DuplicatePattern[]): DuplicatePattern[];
61
+
62
+ interface PatternDetectOptions extends ScanOptions {
63
+ minSimilarity?: number;
64
+ minLines?: number;
65
+ batchSize?: number;
66
+ approx?: boolean;
67
+ minSharedTokens?: number;
68
+ maxCandidatesPerBlock?: number;
69
+ streamResults?: boolean;
70
+ useSmartDefaults?: boolean;
71
+ groupByFilePair?: boolean;
72
+ createClusters?: boolean;
73
+ minClusterTokenCost?: number;
74
+ minClusterFiles?: number;
75
+ excludePatterns?: string[];
76
+ confidenceThreshold?: number;
77
+ ignoreWhitelist?: string[];
78
+ onProgress?: (processed: number, total: number, message: string) => void;
79
+ }
80
+ /**
81
+ * Determine smart defaults based on repository size estimation.
82
+ */
83
+ declare function getSmartDefaults(directory: string, userOptions: Partial<PatternDetectOptions>): Promise<PatternDetectOptions>;
84
+ /**
85
+ * Log current configuration settings to the console.
86
+ */
87
+ declare function logConfiguration(config: PatternDetectOptions, estimatedBlocks: number): void;
88
+
89
+ interface PatternSummary {
90
+ totalPatterns: number;
91
+ totalTokenCost: number;
92
+ patternsByType: Record<PatternType, number>;
93
+ topDuplicates: Array<{
94
+ files: Array<{
95
+ path: string;
96
+ startLine: number;
97
+ endLine: number;
98
+ }>;
99
+ similarity: number;
100
+ patternType: PatternType;
101
+ tokenCost: number;
102
+ }>;
103
+ }
104
+ /**
105
+ * Generate a summary of pattern detection results.
106
+ */
107
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
108
+ /**
109
+ * Filter issues by severity level.
110
+ */
111
+ declare function filterBySeverity(issues: Issue[], severity: string): Issue[];
112
+ /**
113
+ * Get human-readable label for severity.
114
+ */
115
+ declare function getSeverityLabel(severity: Severity): string;
116
+ /**
117
+ * Calculate severity based on similarity.
118
+ */
119
+ declare function calculateSeverity(similarity: number): Severity;
120
+
121
+ /**
122
+ * Main entry point for pattern detection analysis.
123
+ *
124
+ * @param options - Configuration including rootDir and detection parameters.
125
+ * @returns Promise resolving to the comprehensive pattern detect report.
126
+ */
127
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<{
128
+ results: AnalysisResult[];
129
+ duplicates: DuplicatePattern[];
130
+ files: string[];
131
+ groups?: DuplicateGroup[];
132
+ clusters?: RefactorCluster[];
133
+ config: PatternDetectOptions;
134
+ }>;
135
+
136
+ export { type DuplicateGroup as D, type PatternDetectOptions as P, type RefactorCluster as R, type PatternSummary as a, analyzePatterns as b, areBrandSpecificVariants as c, calculateSeverity as d, createRefactorClusters as e, filterBrandSpecificVariants as f, filterBySeverity as g, filterClustersByImpact as h, generateSummary as i, getSeverityLabel as j, getSmartDefaults as k, groupDuplicatesByFilePair as l, logConfiguration as m };
@@ -0,0 +1,137 @@
1
+ import { Severity, ScanOptions, Issue, AnalysisResult } from '@aiready/core';
2
+ import { P as PatternType, D as DuplicatePattern } from './types-tgrmUrHE.mjs';
3
+
4
+ interface DuplicateGroup {
5
+ filePair: string;
6
+ severity: Severity;
7
+ occurrences: number;
8
+ totalTokenCost: number;
9
+ averageSimilarity: number;
10
+ patternTypes: Set<PatternType>;
11
+ lineRanges: Array<{
12
+ file1: {
13
+ start: number;
14
+ end: number;
15
+ };
16
+ file2: {
17
+ start: number;
18
+ end: number;
19
+ };
20
+ }>;
21
+ }
22
+ interface RefactorCluster {
23
+ id: string;
24
+ name: string;
25
+ files: string[];
26
+ severity: Severity;
27
+ duplicateCount: number;
28
+ totalTokenCost: number;
29
+ averageSimilarity: number;
30
+ reason?: string;
31
+ suggestion?: string;
32
+ }
33
+ /**
34
+ * Group raw duplicates by file pairs to reduce noise
35
+ */
36
+ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): DuplicateGroup[];
37
+ /**
38
+ * Create clusters of highly related files (refactor targets)
39
+ * Uses a simple connected components algorithm
40
+ * @param duplicates - Array of duplicate patterns to cluster
41
+ * @returns Array of refactor clusters
42
+ */
43
+ declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
44
+ /**
45
+ * Filter clusters by impact threshold
46
+ * @param clusters - Array of refactor clusters to filter
47
+ * @param minTokenCost - Minimum token cost threshold (default: 1000)
48
+ * @param minFiles - Minimum number of files in cluster (default: 3)
49
+ * @returns Filtered array of refactor clusters
50
+ */
51
+ declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
52
+ /**
53
+ * Detect if two duplicate files are likely brand-specific variants
54
+ * (e.g., different themed versions of the same UI component)
55
+ */
56
+ declare function areBrandSpecificVariants(file1: string, file2: string, code1: string, code2: string): boolean;
57
+ /**
58
+ * Filter out brand-specific variants from duplicates
59
+ */
60
+ declare function filterBrandSpecificVariants(duplicates: DuplicatePattern[]): DuplicatePattern[];
61
+
62
+ interface PatternDetectOptions extends ScanOptions {
63
+ minSimilarity?: number;
64
+ minLines?: number;
65
+ batchSize?: number;
66
+ approx?: boolean;
67
+ minSharedTokens?: number;
68
+ maxCandidatesPerBlock?: number;
69
+ streamResults?: boolean;
70
+ useSmartDefaults?: boolean;
71
+ groupByFilePair?: boolean;
72
+ createClusters?: boolean;
73
+ minClusterTokenCost?: number;
74
+ minClusterFiles?: number;
75
+ excludePatterns?: string[];
76
+ excludeFiles?: string[];
77
+ confidenceThreshold?: number;
78
+ ignoreWhitelist?: string[];
79
+ onProgress?: (processed: number, total: number, message: string) => void;
80
+ }
81
+ /**
82
+ * Determine smart defaults based on repository size estimation.
83
+ */
84
+ declare function getSmartDefaults(directory: string, userOptions: Partial<PatternDetectOptions>): Promise<PatternDetectOptions>;
85
+ /**
86
+ * Log current configuration settings to the console.
87
+ */
88
+ declare function logConfiguration(config: PatternDetectOptions, estimatedBlocks: number): void;
89
+
90
+ interface PatternSummary {
91
+ totalPatterns: number;
92
+ totalTokenCost: number;
93
+ patternsByType: Record<PatternType, number>;
94
+ topDuplicates: Array<{
95
+ files: Array<{
96
+ path: string;
97
+ startLine: number;
98
+ endLine: number;
99
+ }>;
100
+ similarity: number;
101
+ patternType: PatternType;
102
+ tokenCost: number;
103
+ }>;
104
+ }
105
+ /**
106
+ * Generate a summary of pattern detection results.
107
+ */
108
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
109
+ /**
110
+ * Filter issues by severity level.
111
+ */
112
+ declare function filterBySeverity(issues: Issue[], severity: string): Issue[];
113
+ /**
114
+ * Get human-readable label for severity.
115
+ */
116
+ declare function getSeverityLabel(severity: Severity): string;
117
+ /**
118
+ * Calculate severity based on similarity.
119
+ */
120
+ declare function calculateSeverity(similarity: number): Severity;
121
+
122
+ /**
123
+ * Main entry point for pattern detection analysis.
124
+ *
125
+ * @param options - Configuration including rootDir and detection parameters.
126
+ * @returns Promise resolving to the comprehensive pattern detect report.
127
+ */
128
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<{
129
+ results: AnalysisResult[];
130
+ duplicates: DuplicatePattern[];
131
+ files: string[];
132
+ groups?: DuplicateGroup[];
133
+ clusters?: RefactorCluster[];
134
+ config: PatternDetectOptions;
135
+ }>;
136
+
137
+ export { type DuplicateGroup as D, type PatternDetectOptions as P, type RefactorCluster as R, type PatternSummary as a, analyzePatterns as b, areBrandSpecificVariants as c, calculateSeverity as d, createRefactorClusters as e, filterBrandSpecificVariants as f, filterBySeverity as g, filterClustersByImpact as h, generateSummary as i, getSeverityLabel as j, getSmartDefaults as k, groupDuplicatesByFilePair as l, logConfiguration as m };
@@ -0,0 +1,136 @@
1
+ import { Severity, ScanOptions, Issue, AnalysisResult } from '@aiready/core';
2
+ import { P as PatternType, D as DuplicatePattern } from './types-DU2mmhwb.mjs';
3
+
4
+ interface DuplicateGroup {
5
+ filePair: string;
6
+ severity: Severity;
7
+ occurrences: number;
8
+ totalTokenCost: number;
9
+ averageSimilarity: number;
10
+ patternTypes: Set<PatternType>;
11
+ lineRanges: Array<{
12
+ file1: {
13
+ start: number;
14
+ end: number;
15
+ };
16
+ file2: {
17
+ start: number;
18
+ end: number;
19
+ };
20
+ }>;
21
+ }
22
+ interface RefactorCluster {
23
+ id: string;
24
+ name: string;
25
+ files: string[];
26
+ severity: Severity;
27
+ duplicateCount: number;
28
+ totalTokenCost: number;
29
+ averageSimilarity: number;
30
+ reason?: string;
31
+ suggestion?: string;
32
+ }
33
+ /**
34
+ * Group raw duplicates by file pairs to reduce noise
35
+ */
36
+ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): DuplicateGroup[];
37
+ /**
38
+ * Create clusters of highly related files (refactor targets)
39
+ * Uses a simple connected components algorithm
40
+ * @param duplicates - Array of duplicate patterns to cluster
41
+ * @returns Array of refactor clusters
42
+ */
43
+ declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
44
+ /**
45
+ * Filter clusters by impact threshold
46
+ * @param clusters - Array of refactor clusters to filter
47
+ * @param minTokenCost - Minimum token cost threshold (default: 1000)
48
+ * @param minFiles - Minimum number of files in cluster (default: 3)
49
+ * @returns Filtered array of refactor clusters
50
+ */
51
+ declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
52
+ /**
53
+ * Detect if two duplicate files are likely brand-specific variants
54
+ * (e.g., different themed versions of the same UI component)
55
+ */
56
+ declare function areBrandSpecificVariants(file1: string, file2: string, code1: string, code2: string): boolean;
57
+ /**
58
+ * Filter out brand-specific variants from duplicates
59
+ */
60
+ declare function filterBrandSpecificVariants(duplicates: DuplicatePattern[]): DuplicatePattern[];
61
+
62
+ interface PatternDetectOptions extends ScanOptions {
63
+ minSimilarity?: number;
64
+ minLines?: number;
65
+ batchSize?: number;
66
+ approx?: boolean;
67
+ minSharedTokens?: number;
68
+ maxCandidatesPerBlock?: number;
69
+ streamResults?: boolean;
70
+ useSmartDefaults?: boolean;
71
+ groupByFilePair?: boolean;
72
+ createClusters?: boolean;
73
+ minClusterTokenCost?: number;
74
+ minClusterFiles?: number;
75
+ excludePatterns?: string[];
76
+ confidenceThreshold?: number;
77
+ ignoreWhitelist?: string[];
78
+ onProgress?: (processed: number, total: number, message: string) => void;
79
+ }
80
+ /**
81
+ * Determine smart defaults based on repository size estimation.
82
+ */
83
+ declare function getSmartDefaults(directory: string, userOptions: Partial<PatternDetectOptions>): Promise<PatternDetectOptions>;
84
+ /**
85
+ * Log current configuration settings to the console.
86
+ */
87
+ declare function logConfiguration(config: PatternDetectOptions, estimatedBlocks: number): void;
88
+
89
+ interface PatternSummary {
90
+ totalPatterns: number;
91
+ totalTokenCost: number;
92
+ patternsByType: Record<PatternType, number>;
93
+ topDuplicates: Array<{
94
+ files: Array<{
95
+ path: string;
96
+ startLine: number;
97
+ endLine: number;
98
+ }>;
99
+ similarity: number;
100
+ patternType: PatternType;
101
+ tokenCost: number;
102
+ }>;
103
+ }
104
+ /**
105
+ * Generate a summary of pattern detection results.
106
+ */
107
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
108
+ /**
109
+ * Filter issues by severity level.
110
+ */
111
+ declare function filterBySeverity(issues: Issue[], severity: string): Issue[];
112
+ /**
113
+ * Get human-readable label for severity.
114
+ */
115
+ declare function getSeverityLabel(severity: Severity): string;
116
+ /**
117
+ * Calculate severity based on similarity.
118
+ */
119
+ declare function calculateSeverity(similarity: number): Severity;
120
+
121
+ /**
122
+ * Main entry point for pattern detection analysis.
123
+ *
124
+ * @param options - Configuration including rootDir and detection parameters.
125
+ * @returns Promise resolving to the comprehensive pattern detect report.
126
+ */
127
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<{
128
+ results: AnalysisResult[];
129
+ duplicates: DuplicatePattern[];
130
+ files: string[];
131
+ groups?: DuplicateGroup[];
132
+ clusters?: RefactorCluster[];
133
+ config: PatternDetectOptions;
134
+ }>;
135
+
136
+ export { type DuplicateGroup as D, type PatternDetectOptions as P, type RefactorCluster as R, type PatternSummary as a, analyzePatterns as b, areBrandSpecificVariants as c, calculateSeverity as d, createRefactorClusters as e, filterBrandSpecificVariants as f, filterBySeverity as g, filterClustersByImpact as h, generateSummary as i, getSeverityLabel as j, getSmartDefaults as k, groupDuplicatesByFilePair as l, logConfiguration as m };
@@ -0,0 +1,150 @@
1
+ import { Severity, ScanOptions, Issue, AnalysisResult } from '@aiready/core';
2
+ import { P as PatternType, D as DuplicatePattern } from './types-tgrmUrHE.js';
3
+
4
+ interface DuplicateGroup {
5
+ filePair: string;
6
+ severity: Severity;
7
+ occurrences: number;
8
+ totalTokenCost: number;
9
+ averageSimilarity: number;
10
+ patternTypes: Set<PatternType>;
11
+ lineRanges: Array<{
12
+ file1: {
13
+ start: number;
14
+ end: number;
15
+ };
16
+ file2: {
17
+ start: number;
18
+ end: number;
19
+ };
20
+ }>;
21
+ }
22
+ interface RefactorCluster {
23
+ id: string;
24
+ name: string;
25
+ files: string[];
26
+ severity: Severity;
27
+ duplicateCount: number;
28
+ totalTokenCost: number;
29
+ averageSimilarity: number;
30
+ reason?: string;
31
+ suggestion?: string;
32
+ }
33
+ /**
34
+ * Group raw duplicates by file pairs to reduce noise
35
+ */
36
+ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): DuplicateGroup[];
37
+ /**
38
+ * Create clusters of highly related files (refactor targets)
39
+ * Uses a simple connected components algorithm
40
+ * @param duplicates - Array of duplicate patterns to cluster
41
+ * @returns Array of refactor clusters
42
+ */
43
+ declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
44
+ /**
45
+ * Filter clusters by impact threshold
46
+ * @param clusters - Array of refactor clusters to filter
47
+ * @param minTokenCost - Minimum token cost threshold (default: 1000)
48
+ * @param minFiles - Minimum number of files in cluster (default: 3)
49
+ * @returns Filtered array of refactor clusters
50
+ */
51
+ declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
52
+ /**
53
+ * Detect if two duplicate files are likely brand-specific variants
54
+ * (e.g., different themed versions of the same UI component)
55
+ */
56
+ declare function areBrandSpecificVariants(file1: string, file2: string, code1: string, code2: string): boolean;
57
+ /**
58
+ * Filter out brand-specific variants from duplicates
59
+ */
60
+ declare function filterBrandSpecificVariants(duplicates: DuplicatePattern[]): DuplicatePattern[];
61
+
62
+ interface PatternDetectOptions extends ScanOptions {
63
+ minSimilarity?: number;
64
+ minLines?: number;
65
+ batchSize?: number;
66
+ approx?: boolean;
67
+ minSharedTokens?: number;
68
+ maxCandidatesPerBlock?: number;
69
+ streamResults?: boolean;
70
+ useSmartDefaults?: boolean;
71
+ groupByFilePair?: boolean;
72
+ createClusters?: boolean;
73
+ minClusterTokenCost?: number;
74
+ minClusterFiles?: number;
75
+ excludePatterns?: string[];
76
+ excludeFiles?: string[];
77
+ confidenceThreshold?: number;
78
+ ignoreWhitelist?: string[];
79
+ onProgress?: (processed: number, total: number, message: string) => void;
80
+ }
81
+ /**
82
+ * Determine smart defaults based on repository size estimation.
83
+ */
84
+ declare function getSmartDefaults(directory: string, userOptions: Partial<PatternDetectOptions>): Promise<PatternDetectOptions>;
85
+ /**
86
+ * Log current configuration settings to the console.
87
+ */
88
+ declare function logConfiguration(config: PatternDetectOptions, estimatedBlocks: number): void;
89
+
90
+ interface PatternSummary {
91
+ totalPatterns: number;
92
+ totalTokenCost: number;
93
+ patternsByType: Record<PatternType, number>;
94
+ topDuplicates: Array<{
95
+ files: Array<{
96
+ path: string;
97
+ startLine: number;
98
+ endLine: number;
99
+ }>;
100
+ similarity: number;
101
+ patternType: PatternType;
102
+ tokenCost: number;
103
+ }>;
104
+ }
105
+ /**
106
+ * Generate a summary of pattern detection results.
107
+ *
108
+ * @param results - Array of raw analysis results from the detector
109
+ * @returns Structured pattern summary and top duplicates
110
+ */
111
+ declare function generateSummary(results: AnalysisResult[]): PatternSummary;
112
+ /**
113
+ * Filter issues by severity level.
114
+ *
115
+ * @param issues - List of detected issues
116
+ * @param severity - Severity filter: 'all' | 'critical' | 'high' | 'medium'
117
+ * @returns Filtered list of issues
118
+ */
119
+ declare function filterBySeverity(issues: Issue[], severity: string): Issue[];
120
+ /**
121
+ * Get human-readable label for severity.
122
+ *
123
+ * @param severity - The core severity type
124
+ * @returns String representation: 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW'
125
+ */
126
+ declare function getSeverityLabel(severity: Severity): string;
127
+ /**
128
+ * Calculate severity based on similarity.
129
+ *
130
+ * @param similarity - Similarity score from 0 to 1
131
+ * @returns The calculated core severity level
132
+ */
133
+ declare function calculateSeverity(similarity: number): Severity;
134
+
135
+ /**
136
+ * Main entry point for pattern detection analysis.
137
+ *
138
+ * @param options - Configuration including rootDir and detection parameters.
139
+ * @returns Promise resolving to the comprehensive pattern detect report.
140
+ */
141
+ declare function analyzePatterns(options: PatternDetectOptions): Promise<{
142
+ results: AnalysisResult[];
143
+ duplicates: DuplicatePattern[];
144
+ files: string[];
145
+ groups?: DuplicateGroup[];
146
+ clusters?: RefactorCluster[];
147
+ config: PatternDetectOptions;
148
+ }>;
149
+
150
+ export { type DuplicateGroup as D, type PatternDetectOptions as P, type RefactorCluster as R, type PatternSummary as a, analyzePatterns as b, areBrandSpecificVariants as c, calculateSeverity as d, createRefactorClusters as e, filterBrandSpecificVariants as f, filterBySeverity as g, filterClustersByImpact as h, generateSummary as i, getSeverityLabel as j, getSmartDefaults as k, groupDuplicatesByFilePair as l, logConfiguration as m };