@emeryld/manager 1.4.1 → 1.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -5,9 +5,9 @@ import { rootDir } from '../helper-cli/env.js';
|
|
|
5
5
|
export const REPORTING_MODES = ['group', 'file'];
|
|
6
6
|
export const DEFAULT_LIMITS = {
|
|
7
7
|
maxFunctionLength: 150,
|
|
8
|
-
maxMethodLength:
|
|
8
|
+
maxMethodLength: 150,
|
|
9
9
|
maxIndentationDepth: 6,
|
|
10
|
-
maxFunctionsPerFile:
|
|
10
|
+
maxFunctionsPerFile: 25,
|
|
11
11
|
maxComponentsPerFile: 8,
|
|
12
12
|
maxFileLength: 500,
|
|
13
13
|
maxDuplicateLineOccurrences: 3,
|
|
@@ -8,13 +8,14 @@ import * as ts from 'typescript';
|
|
|
8
8
|
/**
 * Analyzes every file in `files` against the configured `limits` and returns
 * the accumulated list of violations.
 *
 * Files are processed sequentially (not in parallel) because each call feeds
 * the shared `duplicateMap` / `fileSnapshots` accumulators; after the per-file
 * pass, cross-file duplicate-line violations are collected from those maps.
 *
 * @param {string[]} files - Paths of the files to analyze.
 * @param {object} limits - Threshold configuration (see DEFAULT_LIMITS).
 * @returns {Promise<object[]>} All violations found across the files.
 */
export async function analyzeFiles(files, limits) {
    const duplicateMap = new Map();
    const fileSnapshots = new Map();
    const violations = [];
    for (const file of files) {
        const fileViolations = await analyzeSingleFile(file, limits, duplicateMap, fileSnapshots);
        violations.push(...fileViolations);
    }
    const duplicateViolations = collectDuplicateViolations(duplicateMap, limits, fileSnapshots);
    violations.push(...duplicateViolations);
    return violations;
}
|
|
17
|
-
async function analyzeSingleFile(filePath, limits, duplicates) {
|
|
18
|
+
async function analyzeSingleFile(filePath, limits, duplicates, fileSnapshots) {
|
|
18
19
|
let content;
|
|
19
20
|
try {
|
|
20
21
|
content = await readFile(filePath, 'utf-8');
|
|
@@ -48,7 +49,7 @@ async function analyzeSingleFile(filePath, limits, duplicates) {
|
|
|
48
49
|
}
|
|
49
50
|
});
|
|
50
51
|
const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.ESNext, true, resolveScriptKind(filePath));
|
|
51
|
-
recordDuplicateLines(normalizedLines, filePath, limits, duplicates, sourceFile, lines);
|
|
52
|
+
recordDuplicateLines(normalizedLines, filePath, limits, duplicates, sourceFile, lines, fileSnapshots);
|
|
52
53
|
const indentationGroups = groupIndentationViolations(indentationViolations, lines, limits.maxIndentationDepth);
|
|
53
54
|
indentationGroups
|
|
54
55
|
.sort((a, b) => b.severity - a.severity)
|
|
@@ -1,8 +1,11 @@
|
|
|
1
1
|
import path from 'node:path';
|
|
2
2
|
import { getLineColumnRange } from './utils.js';
|
|
3
3
|
import { collectRequiredVariables } from './variables.js';
|
|
4
|
-
export function recordDuplicateLines(normalizedLines, filePath, limits, duplicates, sourceFile, lines) {
|
|
4
|
+
export function recordDuplicateLines(normalizedLines, filePath, limits, duplicates, sourceFile, lines, fileSnapshots) {
|
|
5
5
|
const minLines = Math.max(1, limits.minDuplicateLines);
|
|
6
|
+
const registerSnapshot = () => {
|
|
7
|
+
ensureFileSnapshot(fileSnapshots, filePath, normalizedLines, lines, sourceFile);
|
|
8
|
+
};
|
|
6
9
|
if (limits.minDuplicateLines <= 0 || minLines === 1) {
|
|
7
10
|
normalizedLines.forEach((entry, index) => {
|
|
8
11
|
if (!entry.normalized)
|
|
@@ -13,6 +16,7 @@ export function recordDuplicateLines(normalizedLines, filePath, limits, duplicat
|
|
|
13
16
|
if (entry.isImport)
|
|
14
17
|
return;
|
|
15
18
|
const { startColumn, endColumn } = getLineColumnRange(entry.raw);
|
|
19
|
+
registerSnapshot();
|
|
16
20
|
addDuplicateOccurrence(duplicates, entry.normalized, filePath, index + 1, startColumn, index + 1, endColumn, snippet, sourceFile, lines);
|
|
17
21
|
});
|
|
18
22
|
return;
|
|
@@ -48,6 +52,7 @@ export function recordDuplicateLines(normalizedLines, filePath, limits, duplicat
|
|
|
48
52
|
const endLine = i + 1 + lastNonEmptyIndex;
|
|
49
53
|
const { startColumn } = getLineColumnRange(slice[firstNonEmptyIndex].raw);
|
|
50
54
|
const { endColumn } = getLineColumnRange(slice[lastNonEmptyIndex].raw);
|
|
55
|
+
registerSnapshot();
|
|
51
56
|
addDuplicateOccurrence(duplicates, key, filePath, startLine, startColumn, endLine, endColumn, snippet, sourceFile, lines);
|
|
52
57
|
}
|
|
53
58
|
}
|
|
@@ -88,28 +93,147 @@ function addDuplicateOccurrence(duplicates, key, filePath, startLine, startColum
|
|
|
88
93
|
],
|
|
89
94
|
});
|
|
90
95
|
}
|
|
91
|
-
|
|
96
|
+
/**
 * Caches per-file analysis artifacts (parsed source, normalized lines, raw
 * lines) keyed by file path, so duplicate expansion can later re-read a
 * file's contents without re-parsing. First registration wins: a path that
 * is already present in the map is left untouched.
 */
function ensureFileSnapshot(fileSnapshots, filePath, normalizedLines, lines, sourceFile) {
    if (!fileSnapshots.has(filePath)) {
        fileSnapshots.set(filePath, { sourceFile, normalizedLines, lines });
    }
}
|
|
105
|
+
/**
 * Produces a comparable fingerprint for a line entry, used when growing a
 * duplicate region line-by-line across occurrences.
 *
 * Prefers the entry's precomputed `normalized` form; otherwise collapses runs
 * of whitespace in the trimmed text and lowercases it. Returns undefined for
 * lines with no alphanumeric content (blank or punctuation-only), which halts
 * range expansion at that line.
 */
function fingerprintLine(entry) {
    if (entry.normalized) {
        return entry.normalized;
    }
    const squashed = entry.trimmed.replace(/\s+/g, ' ');
    const hasWordChar = /[A-Za-z0-9]/.test(squashed);
    return squashed && hasWordChar ? squashed.toLowerCase() : undefined;
}
|
|
115
|
+
/**
 * Resolves one duplicate occurrence against its file snapshot and returns a
 * mutable zero-based line-index range for it, or undefined when the snapshot
 * is missing or the file has no lines. Indices are clamped to the file's
 * bounds and ordered so that startIndex <= endIndex even when the occurrence
 * was recorded with its lines reversed.
 */
function buildOccurrenceInfo(occurrence, fileSnapshots) {
    const snapshot = fileSnapshots.get(occurrence.file);
    const totalLines = snapshot ? snapshot.normalizedLines.length : 0;
    if (totalLines === 0) {
        return undefined;
    }
    const clamp = (index) => Math.min(totalLines - 1, Math.max(0, index));
    const first = clamp(occurrence.line - 1);
    const last = clamp(occurrence.endLine - 1);
    return {
        file: occurrence.file,
        snapshot,
        startIndex: Math.min(first, last),
        endIndex: Math.max(first, last),
    };
}
|
|
132
|
+
/**
 * Widens a duplicate-line tracker into the largest block that is still
 * identical (by line fingerprint) across all of its recorded occurrences,
 * using the cached file snapshots.
 *
 * @returns {object|undefined} `{ snippet, detail, requiredVariables }` for the
 *   widened block, or undefined when fewer than two occurrences could be
 *   resolved against a snapshot, or the widened range holds no non-empty lines.
 */
function expandDuplicateTracker(tracker, fileSnapshots) {
    const infos = [];
    for (const occurrence of tracker.occurrences) {
        const info = buildOccurrenceInfo(occurrence, fileSnapshots);
        if (info) {
            infos.push(info);
        }
    }
    if (infos.length < 2) {
        return undefined;
    }
    expandRangeAcrossOccurrences(infos);
    const [primary] = infos;
    const { normalizedLines } = primary.snapshot;
    // Collect the non-empty trimmed lines of the (expanded) primary range.
    const trimmedLines = [];
    for (let i = primary.startIndex; i <= primary.endIndex; i += 1) {
        const text = normalizedLines[i].trimmed;
        if (text) {
            trimmedLines.push(text);
        }
    }
    if (trimmedLines.length === 0) {
        return undefined;
    }
    const snippet = trimmedLines.map((line) => line.replace(/\s+/g, ' ')).join('\n');
    const startLine = primary.startIndex + 1;
    const endLine = primary.endIndex + 1;
    const { startColumn } = getLineColumnRange(normalizedLines[primary.startIndex].raw);
    const { endColumn } = getLineColumnRange(normalizedLines[primary.endIndex].raw);
    const requiredVariables = collectRequiredVariables(primary.snapshot.sourceFile, primary.snapshot.lines, {
        startLine,
        startColumn,
        endLine,
        endColumn,
    });
    return {
        snippet,
        detail: infos.map(formatOccurrenceDetail).join('\n'),
        requiredVariables,
    };
}
|
|
163
|
+
/**
 * Greedily grows every occurrence range outward one line at a time — first
 * upward ('left'), then downward ('right') — for as long as the neighbouring
 * line matches across all occurrences. Mutates the infos in place.
 */
function expandRangeAcrossOccurrences(infos) {
    for (const direction of ['left', 'right']) {
        let expanded = true;
        while (expanded) {
            expanded = tryExpandRange(infos, direction);
        }
    }
}
|
|
167
|
+
/**
 * Tries to grow every occurrence range by one line in the given direction
 * ('left' extends startIndex upward, 'right' extends endIndex downward).
 * The step is taken only when every occurrence has a neighbouring line in
 * bounds and all of those lines share the same fingerprint; otherwise no
 * range is modified.
 *
 * @param {Array<{startIndex: number, endIndex: number, snapshot: object}>} infos
 * @param {'left'|'right'} direction
 * @returns {boolean} true when every range was extended by one line.
 */
function tryExpandRange(infos, direction) {
    // Guard: with no occurrences the checks below are vacuously satisfied and
    // this would return true without mutating anything, so callers that loop
    // "while (tryExpandRange(...))" would never terminate.
    if (infos.length === 0) {
        return false;
    }
    const fingerprints = [];
    for (const info of infos) {
        const nextIndex = direction === 'left' ? info.startIndex - 1 : info.endIndex + 1;
        // Stop at the file boundary of any occurrence.
        if (nextIndex < 0 || nextIndex >= info.snapshot.normalizedLines.length) {
            return false;
        }
        const fingerprint = fingerprintLine(info.snapshot.normalizedLines[nextIndex]);
        // Blank / punctuation-only lines have no fingerprint and stop expansion.
        if (!fingerprint)
            return false;
        fingerprints.push(fingerprint);
    }
    // All neighbouring lines must be identical across occurrences.
    if (fingerprints.some((value) => value !== fingerprints[0])) {
        return false;
    }
    for (const info of infos) {
        if (direction === 'left') {
            info.startIndex -= 1;
        }
        else {
            info.endIndex += 1;
        }
    }
    return true;
}
|
|
192
|
+
/**
 * Renders one occurrence as "relative/path:startLine:startCol/endLine:endCol"
 * for the violation detail text. Line numbers are 1-based; columns fall back
 * to 1 when the snapshot has no entry at the resolved index.
 */
function formatOccurrenceDetail(info) {
    const { normalizedLines } = info.snapshot;
    const startEntry = normalizedLines[info.startIndex];
    const endEntry = normalizedLines[info.endIndex];
    const startColumn = startEntry ? getLineColumnRange(startEntry.raw).startColumn : 1;
    const endColumn = endEntry ? getLineColumnRange(endEntry.raw).endColumn : 1;
    const relativePath = path.relative(process.cwd(), info.file);
    const startLine = info.startIndex + 1;
    const endLine = info.endIndex + 1;
    return `${relativePath}:${startLine}:${startColumn}/${endLine}:${endColumn}`;
}
|
|
206
|
+
export function collectDuplicateViolations(duplicates, limits, fileSnapshots) {
|
|
92
207
|
const violations = [];
|
|
208
|
+
const seenDuplicateKeys = new Set();
|
|
93
209
|
for (const tracker of duplicates.values()) {
|
|
94
210
|
if (tracker.count <= limits.maxDuplicateLineOccurrences)
|
|
95
211
|
continue;
|
|
96
212
|
if (tracker.occurrences.length === 0)
|
|
97
213
|
continue;
|
|
214
|
+
const expanded = expandDuplicateTracker(tracker, fileSnapshots);
|
|
215
|
+
const defaultDetail = tracker.occurrences
|
|
216
|
+
.map((occurrence) => {
|
|
217
|
+
const relativePath = path.relative(process.cwd(), occurrence.file);
|
|
218
|
+
return `${relativePath}:${occurrence.line}:${occurrence.startColumn}/${occurrence.endLine}:${occurrence.endColumn}`;
|
|
219
|
+
})
|
|
220
|
+
.join('\n');
|
|
221
|
+
const detailText = expanded?.detail ?? defaultDetail;
|
|
222
|
+
const snippetText = expanded?.snippet ?? tracker.snippet;
|
|
223
|
+
const dedupKey = `${snippetText}::${detailText}`;
|
|
224
|
+
if (seenDuplicateKeys.has(dedupKey))
|
|
225
|
+
continue;
|
|
226
|
+
seenDuplicateKeys.add(dedupKey);
|
|
98
227
|
violations.push({
|
|
99
228
|
type: 'duplicateLine',
|
|
100
229
|
file: tracker.occurrences[0].file,
|
|
101
230
|
line: tracker.occurrences[0].line,
|
|
102
231
|
severity: tracker.count - limits.maxDuplicateLineOccurrences,
|
|
103
232
|
message: `Repeated ${tracker.count} times (max ${limits.maxDuplicateLineOccurrences})`,
|
|
104
|
-
detail:
|
|
105
|
-
|
|
106
|
-
const relativePath = path.relative(process.cwd(), occurrence.file);
|
|
107
|
-
return `${relativePath}:${occurrence.line}:${occurrence.startColumn}/${occurrence.endLine}:${occurrence.endColumn}`;
|
|
108
|
-
})
|
|
109
|
-
.join('\n'),
|
|
110
|
-
snippet: tracker.snippet,
|
|
233
|
+
detail: detailText,
|
|
234
|
+
snippet: snippetText,
|
|
111
235
|
repeatCount: tracker.count,
|
|
112
|
-
requiredVariables: tracker.requiredVariables,
|
|
236
|
+
requiredVariables: expanded?.requiredVariables ?? tracker.requiredVariables,
|
|
113
237
|
});
|
|
114
238
|
}
|
|
115
239
|
return violations;
|
|
@@ -2,7 +2,9 @@ import path from 'node:path';
|
|
|
2
2
|
import * as ts from 'typescript';
|
|
3
3
|
export function normalizeLine(line) {
|
|
4
4
|
const trimmed = line.trim();
|
|
5
|
-
if (trimmed
|
|
5
|
+
if (!trimmed)
|
|
6
|
+
return undefined;
|
|
7
|
+
if (/^[};()]+$/.test(trimmed))
|
|
6
8
|
return undefined;
|
|
7
9
|
const collapse = trimmed.replace(/\s+/g, ' ');
|
|
8
10
|
const noNumbers = collapse.replace(/\d+/g, '#');
|