@aiready/pattern-detect 0.16.2 → 0.16.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-4UHDGB7U.mjs +920 -0
- package/dist/chunk-KPEK5REL.mjs +919 -0
- package/dist/cli.js +118 -134
- package/dist/cli.mjs +115 -121
- package/dist/index.d.mts +7 -5
- package/dist/index.d.ts +7 -5
- package/dist/index.js +3 -11
- package/dist/index.mjs +1 -1
- package/package.json +2 -2
package/dist/index.d.mts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { ToolProvider, Severity, ScanOptions, AnalysisResult, CostConfig, ToolScoringOutput } from '@aiready/core';
|
|
1
|
+
import { ToolProvider, Severity, FileContent, ScanOptions, AnalysisResult, CostConfig, ToolScoringOutput } from '@aiready/core';
|
|
2
2
|
export { Severity } from '@aiready/core';
|
|
3
3
|
|
|
4
4
|
/**
|
|
@@ -24,10 +24,6 @@ interface DuplicatePattern {
|
|
|
24
24
|
suggestion?: string;
|
|
25
25
|
matchedRule?: string;
|
|
26
26
|
}
|
|
27
|
-
interface FileContent {
|
|
28
|
-
file: string;
|
|
29
|
-
content: string;
|
|
30
|
-
}
|
|
31
27
|
interface DetectionOptions {
|
|
32
28
|
minSimilarity: number;
|
|
33
29
|
minLines: number;
|
|
@@ -80,10 +76,16 @@ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): Dupl
|
|
|
80
76
|
/**
|
|
81
77
|
* Create clusters of highly related files (refactor targets)
|
|
82
78
|
* Uses a simple connected components algorithm
|
|
79
|
+
* @param duplicates - Array of duplicate patterns to cluster
|
|
80
|
+
* @returns Array of refactor clusters
|
|
83
81
|
*/
|
|
84
82
|
declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
|
|
85
83
|
/**
|
|
86
84
|
* Filter clusters by impact threshold
|
|
85
|
+
* @param clusters - Array of refactor clusters to filter
|
|
86
|
+
* @param minTokenCost - Minimum token cost threshold (default: 1000)
|
|
87
|
+
* @param minFiles - Minimum number of files in cluster (default: 3)
|
|
88
|
+
* @returns Filtered array of refactor clusters
|
|
87
89
|
*/
|
|
88
90
|
declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
|
|
89
91
|
|
package/dist/index.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { ToolProvider, Severity, ScanOptions, AnalysisResult, CostConfig, ToolScoringOutput } from '@aiready/core';
|
|
1
|
+
import { ToolProvider, Severity, FileContent, ScanOptions, AnalysisResult, CostConfig, ToolScoringOutput } from '@aiready/core';
|
|
2
2
|
export { Severity } from '@aiready/core';
|
|
3
3
|
|
|
4
4
|
/**
|
|
@@ -24,10 +24,6 @@ interface DuplicatePattern {
|
|
|
24
24
|
suggestion?: string;
|
|
25
25
|
matchedRule?: string;
|
|
26
26
|
}
|
|
27
|
-
interface FileContent {
|
|
28
|
-
file: string;
|
|
29
|
-
content: string;
|
|
30
|
-
}
|
|
31
27
|
interface DetectionOptions {
|
|
32
28
|
minSimilarity: number;
|
|
33
29
|
minLines: number;
|
|
@@ -80,10 +76,16 @@ declare function groupDuplicatesByFilePair(duplicates: DuplicatePattern[]): Dupl
|
|
|
80
76
|
/**
|
|
81
77
|
* Create clusters of highly related files (refactor targets)
|
|
82
78
|
* Uses a simple connected components algorithm
|
|
79
|
+
* @param duplicates - Array of duplicate patterns to cluster
|
|
80
|
+
* @returns Array of refactor clusters
|
|
83
81
|
*/
|
|
84
82
|
declare function createRefactorClusters(duplicates: DuplicatePattern[]): RefactorCluster[];
|
|
85
83
|
/**
|
|
86
84
|
* Filter clusters by impact threshold
|
|
85
|
+
* @param clusters - Array of refactor clusters to filter
|
|
86
|
+
* @param minTokenCost - Minimum token cost threshold (default: 1000)
|
|
87
|
+
* @param minFiles - Minimum number of files in cluster (default: 3)
|
|
88
|
+
* @returns Filtered array of refactor clusters
|
|
87
89
|
*/
|
|
88
90
|
declare function filterClustersByImpact(clusters: RefactorCluster[], minTokenCost?: number, minFiles?: number): RefactorCluster[];
|
|
89
91
|
|
package/dist/index.js
CHANGED
|
@@ -235,7 +235,7 @@ function extractBlocks(file, content) {
|
|
|
235
235
|
}
|
|
236
236
|
const blocks = [];
|
|
237
237
|
const lines = content.split("\n");
|
|
238
|
-
const blockRegex = /^\s*(?:export\s+)?(?:async\s+)?(?:public\s+|private\s+|protected\s+|internal\s+|static\s+|readonly\s+|virtual\s+|abstract\s+|override\s+)*(function|class|interface|type|enum|record|struct|void|func|[a-zA-Z0-9_
|
|
238
|
+
const blockRegex = /^\s*(?:export\s+)?(?:async\s+)?(?:public\s+|private\s+|protected\s+|internal\s+|static\s+|readonly\s+|virtual\s+|abstract\s+|override\s+)*(function|class|interface|type|enum|record|struct|void|func|[a-zA-Z0-9_<>[]]+)\s+([a-zA-Z0-9_]+)(?:\s*\(|(?:\s+extends|\s+implements|\s+where)?\s*\{)|^\s*(?:export\s+)?const\s+([a-zA-Z0-9_]+)\s*=\s*[a-zA-Z0-9_.]+\.object\(|^\s*(app\.(?:get|post|put|delete|patch|use))\(/gm;
|
|
239
239
|
let match;
|
|
240
240
|
while ((match = blockRegex.exec(content)) !== null) {
|
|
241
241
|
const startLine = content.substring(0, match.index).split("\n").length;
|
|
@@ -437,13 +437,6 @@ async function detectDuplicatePatterns(fileContents, options) {
|
|
|
437
437
|
// src/grouping.ts
|
|
438
438
|
var import_core3 = require("@aiready/core");
|
|
439
439
|
var import_path = __toESM(require("path"));
|
|
440
|
-
function getSeverityLevel(s) {
|
|
441
|
-
if (s === import_core3.Severity.Critical || s === "critical") return 4;
|
|
442
|
-
if (s === import_core3.Severity.Major || s === "major") return 3;
|
|
443
|
-
if (s === import_core3.Severity.Minor || s === "minor") return 2;
|
|
444
|
-
if (s === import_core3.Severity.Info || s === "info") return 1;
|
|
445
|
-
return 0;
|
|
446
|
-
}
|
|
447
440
|
function groupDuplicatesByFilePair(duplicates) {
|
|
448
441
|
const groups = /* @__PURE__ */ new Map();
|
|
449
442
|
for (const dup of duplicates) {
|
|
@@ -470,7 +463,7 @@ function groupDuplicatesByFilePair(duplicates) {
|
|
|
470
463
|
file2: { start: dup.line2, end: dup.endLine2 }
|
|
471
464
|
});
|
|
472
465
|
const currentSev = dup.severity;
|
|
473
|
-
if (getSeverityLevel(currentSev) > getSeverityLevel(group.severity)) {
|
|
466
|
+
if ((0, import_core3.getSeverityLevel)(currentSev) > (0, import_core3.getSeverityLevel)(group.severity)) {
|
|
474
467
|
group.severity = currentSev;
|
|
475
468
|
}
|
|
476
469
|
}
|
|
@@ -622,7 +615,7 @@ async function getSmartDefaults(directory, userOptions) {
|
|
|
622
615
|
includeTests: false
|
|
623
616
|
};
|
|
624
617
|
const result = { ...defaults };
|
|
625
|
-
for (const
|
|
618
|
+
for (const key of Object.keys(defaults)) {
|
|
626
619
|
if (key in userOptions && userOptions[key] !== void 0) {
|
|
627
620
|
result[key] = userOptions[key];
|
|
628
621
|
}
|
|
@@ -654,7 +647,6 @@ async function analyzePatterns(options) {
|
|
|
654
647
|
maxCandidatesPerBlock = 100,
|
|
655
648
|
streamResults = false,
|
|
656
649
|
severity = "all",
|
|
657
|
-
includeTests = false,
|
|
658
650
|
groupByFilePair = true,
|
|
659
651
|
createClusters = true,
|
|
660
652
|
minClusterTokenCost = 1e3,
|
package/dist/index.mjs
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aiready/pattern-detect",
|
|
3
|
-
"version": "0.16.2",
|
|
3
|
+
"version": "0.16.3",
|
|
4
4
|
"description": "Semantic duplicate pattern detection for AI-generated code - finds similar implementations that waste AI context tokens",
|
|
5
5
|
"main": "./dist/index.js",
|
|
6
6
|
"module": "./dist/index.mjs",
|
|
@@ -45,7 +45,7 @@
|
|
|
45
45
|
"dependencies": {
|
|
46
46
|
"commander": "^14.0.0",
|
|
47
47
|
"chalk": "^5.3.0",
|
|
48
|
-
"@aiready/core": "0.23.
|
|
48
|
+
"@aiready/core": "0.23.3"
|
|
49
49
|
},
|
|
50
50
|
"devDependencies": {
|
|
51
51
|
"tsup": "^8.3.5",
|