scene-capability-engine 3.6.48 → 3.6.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/README.md +4 -2
- package/README.zh.md +4 -2
- package/docs/command-reference.md +4 -3
- package/docs/multi-agent-coordination-guide.md +15 -15
- package/docs/releases/v3.6.50.md +21 -0
- package/docs/steering-strategy-guide.md +1 -1
- package/docs/zh/releases/v3.6.50.md +21 -0
- package/lib/collab/multi-agent-config.js +4 -4
- package/lib/commands/timeline.js +20 -3
- package/lib/steering/adoption-config.js +3 -3
- package/lib/workspace/collab-governance-gate.js +65 -0
- package/lib/workspace/takeover-baseline.js +40 -1
- package/package.json +8 -2
- package/scripts/collab-governance-gate.js +76 -0
- package/scripts/refactor-trigger-audit.js +357 -0
- package/scripts/release-doc-version-audit.js +317 -0
- package/template/.sce/README.md +10 -10
- package/template/.sce/config/multi-agent.json +8 -0
- package/template/.sce/config/takeover-baseline.json +11 -0
- package/template/.sce/steering/CORE_PRINCIPLES.md +8 -0
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
#!/usr/bin/env node
'use strict';

const fs = require('fs');
const path = require('path');

// File extensions treated as auditable source code.
const CODE_EXTENSIONS = new Set([
  '.js', '.cjs', '.mjs', '.jsx',
  '.ts', '.tsx',
  '.py',
  '.java',
  '.go',
  '.rb',
  '.php',
  '.cs'
]);

// Directories scanned by default; extendable via the --scan-dir flag.
const DEFAULT_SCAN_DIRS = [
  'src',
  'lib',
  'scripts',
  'bin',
  'app',
  'server',
  'client',
  'packages',
  'tests'
];

// Directory names (or project-relative paths, e.g. '.sce/reports')
// excluded from the recursive file walk.
const SKIP_DIRS = new Set([
  '.git',
  '.hg',
  '.svn',
  'node_modules',
  'dist',
  'build',
  'coverage',
  '.next',
  '.turbo',
  '.idea',
  '.vscode',
  '.sce/reports'
]);

// Per-kind line-count tuning used to derive recommended thresholds:
// `floors` are the minimum recommended values, `defaults` are the outer
// guardrail caps for assessment/refactor plus the fixed redline value.
const THRESHOLD_PROFILES = Object.freeze({
  source: {
    floors: {
      assessment: 800,
      refactor: 1800
    },
    defaults: {
      assessment: 2000,
      refactor: 4000,
      redline: 10000
    }
  },
  test: {
    floors: {
      assessment: 1200,
      refactor: 3000
    },
    defaults: {
      assessment: 3000,
      refactor: 6000,
      redline: 15000
    }
  }
});
/**
 * Parse CLI flags for the refactor-trigger audit.
 * Supported flags: --project-path <dir>, --json, --fail-on-redline,
 * --out <file>, --scan-dir <dir> (repeatable; appended to the defaults).
 * Unknown flags and value flags missing their value are silently ignored.
 * @param {string[]} [argv] - Raw argument vector (defaults to process.argv.slice(2)).
 * @returns {{projectPath: string, json: boolean, failOnRedline: boolean, out: ?string, scanDirs: string[]}}
 */
function parseArgs(argv = process.argv.slice(2)) {
  const parsed = {
    projectPath: process.cwd(),
    json: false,
    failOnRedline: false,
    out: null,
    scanDirs: DEFAULT_SCAN_DIRS.slice()
  };

  let cursor = 0;
  while (cursor < argv.length) {
    const flag = argv[cursor];
    const argument = argv[cursor + 1];
    switch (flag) {
      case '--json':
        parsed.json = true;
        break;
      case '--fail-on-redline':
        parsed.failOnRedline = true;
        break;
      case '--project-path':
        if (argument) {
          parsed.projectPath = path.resolve(argument);
          cursor += 1;
        }
        break;
      case '--out':
        if (argument) {
          parsed.out = path.resolve(argument);
          cursor += 1;
        }
        break;
      case '--scan-dir':
        if (argument) {
          parsed.scanDirs.push(argument.trim());
          cursor += 1;
        }
        break;
      default:
        break;
    }
    cursor += 1;
  }

  // De-duplicate and drop empty entries (e.g. `--scan-dir ""`).
  parsed.scanDirs = Array.from(new Set(parsed.scanDirs.filter(Boolean)));
  return parsed;
}
/**
 * Convert a path-like value to a forward-slash string.
 * Nullish or falsy input yields the empty string.
 * @param {*} value - Path value (string or anything coercible).
 * @returns {string} Normalized forward-slash path.
 */
function normalizePath(value) {
  const text = `${value || ''}`;
  return text.split('\\').join('/');
}
/**
 * Decide whether a directory should be excluded from the walk, matching
 * either its bare name or its normalized project-relative path against
 * the SKIP_DIRS deny-list.
 * @param {string} relativePath - Path relative to the project root.
 * @param {string} entryName - Bare directory name.
 * @returns {boolean} True when the directory must be skipped.
 */
function shouldSkipDir(relativePath, entryName) {
  return SKIP_DIRS.has(entryName) || SKIP_DIRS.has(normalizePath(relativePath));
}
/**
 * Recursively gather code files under rootDir, skipping deny-listed
 * directories and files whose extension is not in CODE_EXTENSIONS.
 * A missing rootDir yields an empty list rather than an error.
 * @param {string} rootDir - Absolute directory to walk.
 * @param {string} [relativeRoot] - Project-relative prefix for results.
 * @returns {Array<{absolutePath: string, relativePath: string}>}
 */
function collectFilesRecursive(rootDir, relativeRoot = '') {
  if (!fs.existsSync(rootDir)) {
    return [];
  }

  const collected = [];
  for (const entry of fs.readdirSync(rootDir, { withFileTypes: true })) {
    const absolutePath = path.join(rootDir, entry.name);
    const relativePath = normalizePath(path.join(relativeRoot, entry.name));

    if (entry.isDirectory()) {
      if (!shouldSkipDir(relativePath, entry.name)) {
        for (const nested of collectFilesRecursive(absolutePath, relativePath)) {
          collected.push(nested);
        }
      }
    } else if (entry.isFile() && CODE_EXTENSIONS.has(path.extname(entry.name).toLowerCase())) {
      collected.push({ absolutePath, relativePath });
    }
  }
  return collected;
}
/**
 * Classify a project-relative path as 'test' or 'source'.
 * Test files live under tests/ or a __tests__/ directory, or carry a
 * `.test.` / `.spec.` infix in the file name.
 * @param {string} relativePath - Project-relative file path.
 * @returns {'test'|'source'}
 */
function classifyFile(relativePath) {
  const lowered = normalizePath(relativePath).toLowerCase();
  const testMarkers = ['/__tests__/', '.test.', '.spec.'];
  const isTest = lowered.startsWith('tests/')
    || testMarkers.some((marker) => lowered.includes(marker));
  return isTest ? 'test' : 'source';
}
/**
 * Count lines in a text blob, treating both LF and CRLF as separators.
 * Empty or falsy content counts as zero lines.
 * @param {?string} content - File content.
 * @returns {number} Line count.
 */
function countLines(content) {
  return content ? content.split(/\r?\n/).length : 0;
}
/**
 * Round a positive finite value up to the nearest multiple of `step`.
 * Non-finite or non-positive values round to 0.
 * @param {number} value - Value to round.
 * @param {number} step - Rounding granularity.
 * @returns {number} Rounded value.
 */
function roundUp(value, step) {
  const isPositiveFinite = Number.isFinite(value) && value > 0;
  return isPositiveFinite ? Math.ceil(value / step) * step : 0;
}
/**
 * Pick the value at the given quantile ratio from an ascending-sorted array
 * (nearest-rank method: index = ceil(n * ratio) - 1, clamped into bounds).
 * Empty or non-array input yields 0; a singleton yields its only element.
 * @param {number[]} sortedValues - Values sorted ascending.
 * @param {number} ratio - Quantile in [0, 1].
 * @returns {number} Quantile value.
 */
function quantile(sortedValues, ratio) {
  if (!Array.isArray(sortedValues) || sortedValues.length === 0) {
    return 0;
  }
  const size = sortedValues.length;
  if (size === 1) {
    return sortedValues[0];
  }
  let position = Math.ceil(size * ratio) - 1;
  if (position < 0) {
    position = 0;
  } else if (position > size - 1) {
    position = size - 1;
  }
  return sortedValues[position];
}
/**
 * Compute count, max, and p50/p90/p95 line-count statistics for a file list.
 * @param {Array<{lines: number}>} files - Measured files.
 * @returns {{count: number, max: number, p50: number, p90: number, p95: number}}
 */
function buildStats(files) {
  const sortedLines = files
    .map((entry) => entry.lines)
    .sort((a, b) => a - b);
  const count = sortedLines.length;
  const max = count === 0 ? 0 : sortedLines[count - 1];
  return {
    count,
    max,
    p50: quantile(sortedLines, 0.5),
    p90: quantile(sortedLines, 0.9),
    p95: quantile(sortedLines, 0.95)
  };
}
/**
 * Derive recommended assessment/refactor/redline thresholds from observed
 * statistics, rounded to 50-line steps and clamped between the profile
 * floors and the profile default caps. The refactor threshold is kept at
 * least 400 lines above the assessment threshold; redline is fixed.
 * @param {{p90: number, p95: number}} stats - Observed percentiles.
 * @param {{floors: object, defaults: object}} profile - Threshold profile.
 * @returns {{assessment: number, refactor: number, redline: number}}
 */
function buildRecommendedThresholds(stats, profile) {
  const { floors, defaults } = profile;
  const assessmentRaw = roundUp(Math.max(floors.assessment, stats.p90 * 1.25), 50);
  const refactorRaw = roundUp(Math.max(floors.refactor, stats.p95 * 1.35, assessmentRaw + 400), 50);

  const assessment = Math.min(defaults.assessment, Math.max(floors.assessment, assessmentRaw));
  const refactor = Math.min(defaults.refactor, Math.max(floors.refactor, refactorRaw, assessment + 400));

  return {
    assessment,
    refactor,
    redline: defaults.redline
  };
}
/**
 * Map a file's line count to its trigger level: 'redline' at or above the
 * redline threshold, then 'refactor', then 'assessment', otherwise 'ok'.
 * @param {number} lines - Measured line count.
 * @param {{assessment: number, refactor: number, redline: number}} thresholds
 * @returns {'redline'|'refactor'|'assessment'|'ok'}
 */
function evaluateFile(lines, thresholds) {
  const ordered = [
    ['redline', thresholds.redline],
    ['refactor', thresholds.refactor],
    ['assessment', thresholds.assessment]
  ];
  for (const [label, limit] of ordered) {
    if (lines >= limit) {
      return label;
    }
  }
  return 'ok';
}
/**
 * Scan the project's code directories, compute per-kind line-count
 * statistics, derive recommended refactor thresholds, and classify every
 * scanned file against them.
 * @param {{projectPath?: string, scanDirs?: string[]}} [options]
 * @returns {object} Audit report: thresholds, stats, offender summary,
 *   capped offender/top-file lists, and textual recommendations.
 */
function auditRefactorTriggers(options = {}) {
  const projectPath = path.resolve(options.projectPath || process.cwd());
  // Fall back to the built-in directory list when no override is provided.
  const scanDirs = Array.isArray(options.scanDirs) && options.scanDirs.length > 0
    ? options.scanDirs
    : DEFAULT_SCAN_DIRS;

  // Collect candidate files, measure them, and sort largest-first
  // (ties broken alphabetically) so reports lead with the worst offenders.
  const files = Array.from(new Set(scanDirs))
    .flatMap((dirName) => collectFilesRecursive(path.join(projectPath, dirName), dirName))
    .map((file) => {
      const content = fs.readFileSync(file.absolutePath, 'utf8');
      const kind = classifyFile(file.relativePath);
      return {
        file: file.relativePath,
        kind,
        lines: countLines(content)
      };
    })
    .sort((left, right) => right.lines - left.lines || left.file.localeCompare(right.file));

  // Source and test files are measured against separate threshold profiles.
  const sourceFiles = files.filter((item) => item.kind === 'source');
  const testFiles = files.filter((item) => item.kind === 'test');
  const sourceStats = buildStats(sourceFiles);
  const testStats = buildStats(testFiles);
  const sourceThresholds = buildRecommendedThresholds(sourceStats, THRESHOLD_PROFILES.source);
  const testThresholds = buildRecommendedThresholds(testStats, THRESHOLD_PROFILES.test);

  // Tag each file with the thresholds it was judged by and its trigger level.
  const evaluatedFiles = files.map((file) => {
    const thresholds = file.kind === 'test' ? testThresholds : sourceThresholds;
    return {
      ...file,
      thresholds,
      trigger: evaluateFile(file.lines, thresholds)
    };
  });

  const offenders = evaluatedFiles.filter((item) => item.trigger !== 'ok');
  const redline = offenders.filter((item) => item.trigger === 'redline');
  const refactor = offenders.filter((item) => item.trigger === 'refactor');
  const assessment = offenders.filter((item) => item.trigger === 'assessment');

  const recommendations = [
    'Run this audit weekly and before each release to recalibrate project-specific refactor trigger points.',
    'When no project-specific threshold is agreed yet, keep the SCE default source thresholds as the outer guardrail: 2000 / 4000 / 10000 lines.',
    redline.length > 0
      ? 'Redline files already exist; new non-emergency changes touching those files should prioritize decomposition instead of feature accretion.'
      : 'No redline file detected under the current recommended thresholds.'
  ];

  // The audit "passes" as long as no file breaches the redline threshold;
  // offender/top-file lists are capped to keep the report payload bounded.
  return {
    mode: 'refactor-trigger-audit',
    project_path: projectPath,
    passed: redline.length === 0,
    scan_dirs: scanDirs,
    scanned_file_count: files.length,
    cadence_recommendation: ['weekly', 'before_release'],
    thresholds: {
      source: sourceThresholds,
      test: testThresholds
    },
    stats: {
      source: sourceStats,
      test: testStats
    },
    summary: {
      offender_count: offenders.length,
      assessment_count: assessment.length,
      refactor_count: refactor.length,
      redline_count: redline.length
    },
    offenders: offenders.slice(0, 50),
    top_files: evaluatedFiles.slice(0, 20),
    recommendations
  };
}
/**
 * Persist the audit result as pretty-printed JSON at outPath, creating
 * parent directories as needed. No-op when outPath is falsy.
 * @param {object} result - Audit result to serialize.
 * @param {?string} outPath - Destination file path (or null to skip).
 */
function maybeWriteReport(result, outPath) {
  if (!outPath) {
    return;
  }
  const parentDir = path.dirname(outPath);
  fs.mkdirSync(parentDir, { recursive: true });
  const serialized = `${JSON.stringify(result, null, 2)}\n`;
  fs.writeFileSync(outPath, serialized, 'utf8');
}
/**
 * Print a concise human-readable audit summary to stdout, plus up to ten
 * redline offenders to stderr when any exist.
 * @param {object} result - Report produced by auditRefactorTriggers.
 */
function printHumanReport(result) {
  const { summary, thresholds } = result;
  console.log(
    `[refactor-trigger-audit] scanned=${result.scanned_file_count} offenders=${summary.offender_count} `
    + `assessment=${summary.assessment_count} refactor=${summary.refactor_count} redline=${summary.redline_count}`
  );

  const formatThresholds = (t) => `${t.assessment}/${t.refactor}/${t.redline}`;
  console.log(
    `[refactor-trigger-audit] source-thresholds=${formatThresholds(thresholds.source)} `
    + `test-thresholds=${formatThresholds(thresholds.test)}`
  );

  if (summary.redline_count > 0) {
    const worst = result.offenders
      .filter((item) => item.trigger === 'redline')
      .slice(0, 10);
    for (const item of worst) {
      console.error(`[refactor-trigger-audit] redline ${item.kind} ${item.file} (${item.lines} lines)`);
    }
  }
}
// CLI entry point: parse flags, run the audit, optionally persist the JSON
// report, emit JSON or a human summary, and exit non-zero when
// --fail-on-redline is set and redline files exist.
if (require.main === module) {
  const options = parseArgs(process.argv.slice(2));
  const result = auditRefactorTriggers(options);
  maybeWriteReport(result, options.out);

  if (options.json) {
    process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
  } else {
    printHumanReport(result);
  }

  if (options.failOnRedline && result.summary.redline_count > 0) {
    process.exit(1);
  }
}
// Public API for programmatic use and unit testing.
module.exports = {
  DEFAULT_SCAN_DIRS,
  THRESHOLD_PROFILES,
  auditRefactorTriggers,
  buildRecommendedThresholds,
  buildStats,
  classifyFile,
  parseArgs
};
@@ -0,0 +1,317 @@
|
|
|
1
|
+
#!/usr/bin/env node
'use strict';

const fs = require('fs');
const path = require('path');

// Release-facing documents whose footer metadata must stay in sync with
// package.json (version) and CHANGELOG.md (latest release date).
// `forbidVersionedHeadings` additionally bans version-stamped headings
// in long-lived README files.
const RELEASE_DOCS = [
  {
    file: 'README.md',
    label: 'README.md',
    versionField: 'Version',
    versionPattern: /\*\*Version\*\*:\s*([^\s]+)/,
    updatedField: 'Last Updated',
    updatedPattern: /\*\*Last Updated\*\*:\s*(\d{4}-\d{2}-\d{2})/
  },
  {
    file: 'README.zh.md',
    label: 'README.zh.md',
    versionField: '版本',
    versionPattern: /\*\*版本\*\*[::]\s*([^\s]+)/,
    updatedField: '最后更新',
    updatedPattern: /\*\*最后更新\*\*[::]\s*(\d{4}-\d{2}-\d{2})/
  },
  {
    file: '.sce/README.md',
    label: '.sce/README.md',
    versionField: 'sce Version',
    versionPattern: /\*\*sce Version\*\*:\s*([^\s]+)/,
    updatedField: 'Last Updated',
    updatedPattern: /\*\*Last Updated\*\*:\s*(\d{4}-\d{2}-\d{2})/,
    forbidVersionedHeadings: true
  },
  {
    file: 'template/.sce/README.md',
    label: 'template/.sce/README.md',
    versionField: 'sce Version',
    versionPattern: /\*\*sce Version\*\*:\s*([^\s]+)/,
    updatedField: 'Last Updated',
    updatedPattern: /\*\*Last Updated\*\*:\s*(\d{4}-\d{2}-\d{2})/,
    forbidVersionedHeadings: true
  }
];

// Matches headings like "## Feature (v1.2.3)" or "# Title (2.0.x)".
// NOTE: /g regexes are stateful (lastIndex); callers reset before/after use.
const VERSIONED_HEADING_PATTERN = /^#{1,6}\s+.*\((?:v)?\d+\.\d+(?:\.\d+|\.x)\)\s*$/gm;
// Matches released CHANGELOG entries of the form "## [x.y.z] - YYYY-MM-DD".
const CHANGELOG_RELEASE_PATTERN = /^## \[([^\]]+)\] - (\d{4}-\d{2}-\d{2})(?:\s.*)?$/gm;
/**
 * Parse CLI flags for the release-doc version audit.
 * Supported: --json, --fail-on-error, --project-path <dir>, --out <file>.
 * A value flag missing its value falls back to the cwd (--project-path)
 * or resolves the empty string (--out), matching the original behavior.
 * @param {string[]} [argv] - Raw argument vector (defaults to process.argv.slice(2)).
 * @returns {{projectPath: string, json: boolean, failOnError: boolean, out: ?string}}
 */
function parseArgs(argv = process.argv.slice(2)) {
  const options = {
    projectPath: process.cwd(),
    json: false,
    failOnError: false,
    out: null
  };

  for (let cursor = 0; cursor < argv.length; cursor += 1) {
    const flag = argv[cursor];
    if (flag === '--json') {
      options.json = true;
    } else if (flag === '--fail-on-error') {
      options.failOnError = true;
    } else if (flag === '--project-path') {
      options.projectPath = path.resolve(argv[cursor + 1] || process.cwd());
      cursor += 1;
    } else if (flag === '--out') {
      options.out = path.resolve(argv[cursor + 1] || '');
      cursor += 1;
    }
  }

  return options;
}
/**
 * Append a normalized error-severity violation record to the accumulator.
 * @param {Array<object>} violations - Accumulator mutated in place.
 * @param {string} file - Offending file (project-relative).
 * @param {string} rule - Machine-readable rule identifier.
 * @param {string} message - Human-readable description.
 * @param {string} suggestion - Suggested remediation.
 */
function pushViolation(violations, file, rule, message, suggestion) {
  const record = {
    severity: 'error',
    file,
    rule,
    message,
    suggestion
  };
  violations.push(record);
}
/**
 * Resolve the current release version from <projectPath>/package.json.
 * Records a violation and returns null when the file is missing or cannot
 * be parsed; returns null (without a violation) when the version field is
 * absent or not a string.
 * @param {string} projectPath - Project root directory.
 * @param {Array<object>} violations - Accumulator for violation records.
 * @returns {?string} Trimmed version string, or null when unavailable.
 */
function loadPackageVersion(projectPath, violations) {
  const packageJsonPath = path.join(projectPath, 'package.json');
  if (!fs.existsSync(packageJsonPath)) {
    pushViolation(
      violations,
      'package.json',
      'missing_package_json',
      'package.json is required to resolve the current release version.',
      'Restore package.json before running the release doc audit.'
    );
    return null;
  }

  let packageJson;
  try {
    packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
  } catch (err) {
    // Malformed JSON must surface as an audit violation, not crash the audit.
    pushViolation(
      violations,
      'package.json',
      'invalid_package_json',
      `package.json could not be parsed: ${err.message}`,
      'Fix the JSON syntax in package.json before running the release doc audit.'
    );
    return null;
  }
  return typeof packageJson.version === 'string' ? packageJson.version.trim() : null;
}
/**
 * Extract the first (topmost, i.e. latest) released entry from CHANGELOG
 * content, matching the "## [x.y.z] - YYYY-MM-DD" heading format.
 * @param {string} changelogContent - Raw CHANGELOG.md content.
 * @returns {?{version: string, date: string}} Latest release, or null.
 */
function extractLatestChangelogRelease(changelogContent) {
  // Reset the shared /g regex state before scanning from the top.
  CHANGELOG_RELEASE_PATTERN.lastIndex = 0;
  const found = CHANGELOG_RELEASE_PATTERN.exec(changelogContent);
  return found
    ? { version: found[1].trim(), date: found[2].trim() }
    : null;
}
/**
 * Load CHANGELOG.md under projectPath and resolve its topmost released
 * entry. Records violations when the changelog is missing, contains no
 * release entry, or its latest entry disagrees with package.json's version.
 * @param {string} projectPath - Project root directory.
 * @param {?string} packageVersion - Version from package.json (may be null).
 * @param {Array<object>} violations - Accumulator for violation records.
 * @returns {?{version: string, date: string}} Latest release, or null.
 */
function loadLatestChangelogRelease(projectPath, packageVersion, violations) {
  const changelogPath = path.join(projectPath, 'CHANGELOG.md');
  if (!fs.existsSync(changelogPath)) {
    pushViolation(
      violations,
      'CHANGELOG.md',
      'missing_changelog',
      'CHANGELOG.md is required to resolve the latest release date.',
      'Restore CHANGELOG.md before running the release doc audit.'
    );
    return null;
  }

  const changelogContent = fs.readFileSync(changelogPath, 'utf8');
  const latestRelease = extractLatestChangelogRelease(changelogContent);
  if (!latestRelease) {
    pushViolation(
      violations,
      'CHANGELOG.md',
      'missing_release_entry',
      'Could not find a released version entry in CHANGELOG.md.',
      'Add a release entry like `## [x.y.z] - YYYY-MM-DD` before publishing.'
    );
    return null;
  }

  // Version drift is recorded as a violation, but the entry is still
  // returned so downstream date checks can proceed.
  if (packageVersion && latestRelease.version !== packageVersion) {
    pushViolation(
      violations,
      'CHANGELOG.md',
      'stale_latest_release_entry',
      `CHANGELOG.md latest release is ${latestRelease.version} but package.json is ${packageVersion}.`,
      'Update the top released CHANGELOG entry so version and release date match package.json.'
    );
  }

  return latestRelease;
}
/**
 * Return the first capture group of `pattern` in `content`, trimmed,
 * or null when the pattern does not match.
 * @param {string} content - Document content.
 * @param {RegExp} pattern - Pattern with at least one capture group.
 * @returns {?string}
 */
function extractField(content, pattern) {
  const found = content.match(pattern);
  if (!found) {
    return null;
  }
  return found[1].trim();
}
/**
 * Collect every version-stamped heading (e.g. "## Feature (v1.2.3)") in a
 * document, in document order.
 * @param {string} content - Document content.
 * @returns {string[]} Trimmed matching heading lines.
 */
function collectVersionedHeadings(content) {
  const headings = [];
  for (const found of content.matchAll(VERSIONED_HEADING_PATTERN)) {
    headings.push(found[0].trim());
  }
  // Keep the shared /g regex's state reset for any other caller.
  VERSIONED_HEADING_PATTERN.lastIndex = 0;
  return headings;
}
/**
 * Audit every release-facing document in RELEASE_DOCS against the expected
 * version (package.json) and release date (latest CHANGELOG entry),
 * recording a violation for each missing file, missing/stale footer field,
 * and (where configured) version-stamped heading.
 * @param {{projectPath?: string}} [options]
 * @returns {object} Audit report with per-document results and violations.
 */
function auditReleaseDocs(options = {}) {
  const projectPath = path.resolve(options.projectPath || process.cwd());
  const violations = [];
  const packageVersion = loadPackageVersion(projectPath, violations);
  const latestRelease = loadLatestChangelogRelease(projectPath, packageVersion, violations);
  const expectedVersion = packageVersion;
  const expectedDate = latestRelease ? latestRelease.date : null;
  const documents = [];

  for (const doc of RELEASE_DOCS) {
    const absolutePath = path.join(projectPath, doc.file);
    if (!fs.existsSync(absolutePath)) {
      pushViolation(
        violations,
        doc.file,
        'missing_release_doc',
        `${doc.file} is missing.`,
        `Restore ${doc.file} so release metadata stays auditable.`
      );
      continue;
    }

    const content = fs.readFileSync(absolutePath, 'utf8');
    const actualVersion = extractField(content, doc.versionPattern);
    const actualUpdated = extractField(content, doc.updatedPattern);
    // Versioned headings are only flagged for docs that opt in.
    const versionedHeadings = doc.forbidVersionedHeadings
      ? collectVersionedHeadings(content)
      : [];

    // Footer version must exist and match package.json.
    if (!actualVersion) {
      pushViolation(
        violations,
        doc.file,
        'missing_doc_version_field',
        `${doc.file} is missing the "${doc.versionField}" footer field.`,
        `Add a "${doc.versionField}" footer line that matches package.json version ${expectedVersion || '<unknown>'}.`
      );
    } else if (expectedVersion && actualVersion !== expectedVersion) {
      pushViolation(
        violations,
        doc.file,
        'stale_doc_version',
        `${doc.file} tracks version ${actualVersion} but package.json is ${expectedVersion}.`,
        `Refresh ${doc.file} so "${doc.versionField}" matches ${expectedVersion}.`
      );
    }

    // Footer date must exist and match the latest CHANGELOG release date.
    if (!actualUpdated) {
      pushViolation(
        violations,
        doc.file,
        'missing_doc_updated_field',
        `${doc.file} is missing the "${doc.updatedField}" footer field.`,
        `Add a "${doc.updatedField}" footer line that matches the latest CHANGELOG release date ${expectedDate || '<unknown>'}.`
      );
    } else if (expectedDate && actualUpdated !== expectedDate) {
      pushViolation(
        violations,
        doc.file,
        'stale_doc_updated_date',
        `${doc.file} last updated date is ${actualUpdated} but latest CHANGELOG release date is ${expectedDate}.`,
        `Refresh ${doc.file} so "${doc.updatedField}" matches ${expectedDate}.`
      );
    }

    if (versionedHeadings.length > 0) {
      pushViolation(
        violations,
        doc.file,
        'versioned_capability_headings',
        `${doc.file} contains version-stamped headings: ${versionedHeadings.join(' | ')}.`,
        'Remove release/version markers from long-lived README headings and keep current version tracking only in the footer.'
      );
    }

    documents.push({
      file: doc.file,
      path: absolutePath,
      actual_version: actualVersion,
      expected_version: expectedVersion,
      actual_updated: actualUpdated,
      expected_updated: expectedDate,
      versioned_heading_count: versionedHeadings.length
    });
  }

  // All violations are error severity, so the audit passes iff none exist.
  return {
    mode: 'release-doc-version-audit',
    passed: violations.length === 0,
    project_path: projectPath,
    package_version: packageVersion,
    changelog_release: latestRelease,
    error_count: violations.length,
    documents,
    violations
  };
}
/**
 * Print the audit outcome: a success line on stdout when clean, otherwise
 * each violation (and its suggestion, when present) on stderr.
 * @param {object} result - Report produced by auditReleaseDocs.
 */
function printHumanReport(result) {
  if (result.violations.length === 0) {
    console.log('Release doc version audit passed: README release metadata matches package.json and CHANGELOG.');
    return;
  }

  console.error(`Release doc version audit found ${result.error_count} error(s).`);
  result.violations.forEach((violation) => {
    console.error(`- ${violation.file} / ${violation.rule}: ${violation.message}`);
    if (violation.suggestion) {
      console.error(`  suggestion: ${violation.suggestion}`);
    }
  });
}
/**
 * Persist the audit result as pretty-printed JSON at outputPath, creating
 * parent directories as needed. No-op when outputPath is falsy.
 * @param {?string} outputPath - Destination file path (or null to skip).
 * @param {object} result - Audit result to serialize.
 */
function maybeWriteReport(outputPath, result) {
  if (!outputPath) {
    return;
  }
  fs.mkdirSync(path.dirname(outputPath), { recursive: true });
  const payload = JSON.stringify(result, null, 2);
  fs.writeFileSync(outputPath, `${payload}\n`, 'utf8');
}
// CLI entry point: run the audit, optionally persist the JSON report, emit
// JSON or a human summary, and exit non-zero when --fail-on-error is set
// and any violations were found.
if (require.main === module) {
  const options = parseArgs(process.argv.slice(2));
  const result = auditReleaseDocs(options);
  maybeWriteReport(options.out, result);

  if (options.json) {
    process.stdout.write(`${JSON.stringify(result, null, 2)}\n`);
  } else {
    printHumanReport(result);
  }

  if (options.failOnError && result.error_count > 0) {
    process.exit(1);
  }
}
// Public API for programmatic use and unit testing.
module.exports = {
  RELEASE_DOCS,
  auditReleaseDocs,
  extractLatestChangelogRelease,
  parseArgs
};