docguard-cli 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/PHILOSOPHY.md +150 -0
- package/README.md +309 -0
- package/STANDARD.md +751 -0
- package/cli/commands/agents.mjs +221 -0
- package/cli/commands/audit.mjs +92 -0
- package/cli/commands/badge.mjs +72 -0
- package/cli/commands/ci.mjs +80 -0
- package/cli/commands/diagnose.mjs +273 -0
- package/cli/commands/diff.mjs +360 -0
- package/cli/commands/fix.mjs +610 -0
- package/cli/commands/generate.mjs +842 -0
- package/cli/commands/guard.mjs +158 -0
- package/cli/commands/hooks.mjs +227 -0
- package/cli/commands/init.mjs +249 -0
- package/cli/commands/score.mjs +396 -0
- package/cli/commands/watch.mjs +143 -0
- package/cli/docguard.mjs +458 -0
- package/cli/validators/architecture.mjs +380 -0
- package/cli/validators/changelog.mjs +39 -0
- package/cli/validators/docs-sync.mjs +110 -0
- package/cli/validators/drift.mjs +101 -0
- package/cli/validators/environment.mjs +70 -0
- package/cli/validators/freshness.mjs +224 -0
- package/cli/validators/security.mjs +101 -0
- package/cli/validators/structure.mjs +88 -0
- package/cli/validators/test-spec.mjs +115 -0
- package/docs/ai-integration.md +179 -0
- package/docs/commands.md +239 -0
- package/docs/configuration.md +96 -0
- package/docs/faq.md +155 -0
- package/docs/installation.md +81 -0
- package/docs/profiles.md +103 -0
- package/docs/quickstart.md +79 -0
- package/package.json +57 -0
- package/templates/ADR.md.template +64 -0
- package/templates/AGENTS.md.template +88 -0
- package/templates/ARCHITECTURE.md.template +78 -0
- package/templates/CHANGELOG.md.template +16 -0
- package/templates/CURRENT-STATE.md.template +64 -0
- package/templates/DATA-MODEL.md.template +66 -0
- package/templates/DEPLOYMENT.md.template +66 -0
- package/templates/DRIFT-LOG.md.template +18 -0
- package/templates/ENVIRONMENT.md.template +43 -0
- package/templates/KNOWN-GOTCHAS.md.template +69 -0
- package/templates/ROADMAP.md.template +82 -0
- package/templates/RUNBOOKS.md.template +115 -0
- package/templates/SECURITY.md.template +42 -0
- package/templates/TEST-SPEC.md.template +55 -0
- package/templates/TROUBLESHOOTING.md.template +96 -0
- package/templates/VENDOR-BUGS.md.template +74 -0
- package/templates/ci/github-actions.yml +39 -0
- package/templates/commands/docguard.fix.md +65 -0
- package/templates/commands/docguard.guard.md +40 -0
- package/templates/commands/docguard.init.md +62 -0
- package/templates/commands/docguard.review.md +44 -0
- package/templates/commands/docguard.update.md +44 -0
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Architecture Validator — Enhanced with automatic import analysis
|
|
3
|
+
*
|
|
4
|
+
* Two modes:
|
|
5
|
+
* 1. Config-driven: Uses `layers` from .docguard.json (existing behavior)
|
|
6
|
+
* 2. Auto-detect: Scans ARCHITECTURE.md for layer boundary declarations,
|
|
7
|
+
* then validates imports across the codebase.
|
|
8
|
+
*
|
|
9
|
+
* Import violations detected:
|
|
10
|
+
* - Circular dependencies (A → B → A)
|
|
11
|
+
* - Layer boundary violations (routes importing from routes, etc.)
|
|
12
|
+
* - Orphan modules (code files with 0 inbound imports)
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
|
|
16
|
+
import { resolve, join, extname, relative, dirname, basename } from 'node:path';
|
|
17
|
+
|
|
18
|
+
const IGNORE_DIRS = new Set([
|
|
19
|
+
'node_modules', '.git', '.next', 'dist', 'build',
|
|
20
|
+
'coverage', '.cache', '__pycache__', '.venv', 'vendor',
|
|
21
|
+
'templates', 'configs', 'Research', 'docs-canonical', 'docs-implementation',
|
|
22
|
+
]);
|
|
23
|
+
|
|
24
|
+
const CODE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.mjs', '.cjs', '.jsx']);
|
|
25
|
+
|
|
26
|
+
/**
 * Run all architecture checks for a project and return an aggregated result.
 *
 * Pipeline:
 *   1. Config-driven layer rules from .docguard.json (`config.layers`), if any.
 *   2. Build the cross-file import graph; bail out when no code files exist.
 *   3. Report circular dependencies as warnings.
 *   4. Enforce layer boundaries declared in docs-canonical/ARCHITECTURE.md.
 *   5. With zero checks recorded, report a single implicit pass.
 *
 * @param {string} projectDir - Absolute path of the project root.
 * @param {object} config - DocGuard configuration (may carry `layers`).
 * @returns {{name: string, errors: string[], warnings: string[], passed: number, total: number}}
 */
export function validateArchitecture(projectDir, config) {
  const results = { name: 'architecture', errors: [], warnings: [], passed: 0, total: 0 };

  // 1. Config-driven layer rules, when the config declares any.
  const configLayers = config.layers;
  const hasConfigLayers = Boolean(configLayers) && Object.keys(configLayers).length > 0;
  if (hasConfigLayers) {
    validateConfigLayers(projectDir, config, configLayers, results);
  }

  // 2. Nothing to analyse without code files.
  const graph = buildImportGraph(projectDir);
  if (graph.files.length === 0) {
    return results;
  }

  // 3. Circular dependencies count toward the total but are warnings only.
  detectCircularDeps(graph).forEach((cycle) => {
    results.total += 1;
    results.warnings.push(`Circular dependency: ${cycle.join(' → ')}`);
  });

  // 4. Layer boundaries declared in ARCHITECTURE.md, if the doc exists.
  const archFile = resolve(projectDir, 'docs-canonical/ARCHITECTURE.md');
  if (existsSync(archFile)) {
    const declared = parseLayerBoundaries(readFileSync(archFile, 'utf-8'));
    if (declared.length > 0) {
      validateLayerBoundaries(projectDir, graph, declared, results);
    }
  }

  // 5. No checks at all → a single implicit pass so the score isn't 0/0.
  if (results.total === 0) {
    results.total = 1;
    results.passed = 1;
  }

  return results;
}
|
|
65
|
+
|
|
66
|
+
// ── Config-driven validation (existing behavior) ────────────────────────────
|
|
67
|
+
|
|
68
|
+
/**
 * Config-driven layer validation: enforce the `layers` map declared in
 * .docguard.json. For each layer with a `dir` and a `canImport` list, any
 * local import whose specifier mentions the directory of a non-allowed layer
 * is recorded as an error.
 *
 * Fixes:
 * - removed the redundant `imp.includes(`/${dir}/`)` clause (it is implied
 *   by the plain substring test that precedes it);
 * - `readFileSync` is now guarded, matching buildImportGraph(), so an
 *   unreadable file is skipped instead of aborting the whole validator.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Full DocGuard config (unused beyond `layers`).
 * @param {object} layers - Map of layerName → { dir, canImport }.
 * @param {object} results - Mutable results accumulator (errors/total updated).
 */
function validateConfigLayers(projectDir, config, layers, results) {
  // Build dir → { name, canImport, forbidden[] }. A layer's forbidden dirs
  // are the dirs of every other layer absent from its `canImport` list.
  const layerMap = {};
  for (const [layerName, layerConfig] of Object.entries(layers)) {
    if (!layerConfig.dir || !layerConfig.canImport) continue;
    layerMap[layerConfig.dir] = {
      name: layerName,
      canImport: layerConfig.canImport,
      forbidden: Object.entries(layers)
        .filter(([name]) => !layerConfig.canImport.includes(name) && name !== layerName)
        .map(([, cfg]) => cfg.dir)
        .filter(Boolean),
    };
  }

  for (const [dir, layer] of Object.entries(layerMap)) {
    const layerDir = resolve(projectDir, dir);
    if (!existsSync(layerDir)) continue;

    for (const file of getFilesRecursive(layerDir)) {
      if (!CODE_EXTENSIONS.has(extname(file))) continue;

      let content;
      try {
        content = readFileSync(file, 'utf-8');
      } catch {
        continue; // unreadable file — skip it, same policy as buildImportGraph()
      }
      const relPath = relative(projectDir, file);

      for (const imp of extractImports(content)) {
        // Only local (relative/absolute) specifiers can cross layer dirs.
        if (!imp.startsWith('.') && !imp.startsWith('/')) continue;

        for (const forbiddenDir of layer.forbidden) {
          // Substring match — coarse, but mirrors the original heuristic.
          if (imp.includes(forbiddenDir)) {
            results.total++;
            results.errors.push(
              `${relPath}: ${layer.name} layer imports from forbidden layer (${forbiddenDir})`
            );
          }
        }
      }
    }
  }
}
|
|
110
|
+
|
|
111
|
+
// ── Import Graph Builder ────────────────────────────────────────────────────
|
|
112
|
+
|
|
113
|
+
/**
 * Walk the project tree and build a directed import graph over local code
 * files.
 *
 * @param {string} projectDir - Absolute project root.
 * @returns {{files: string[], edges: {from: string, to: string}[], fileMap: Map<string, string[]>}}
 *   `files` lists every project-relative code file; `edges` one entry per
 *   resolved local import; `fileMap` maps a file to its resolved imports.
 *   Unreadable files appear in `files` but contribute no edges.
 */
function buildImportGraph(projectDir) {
  const graph = { files: [], edges: [], fileMap: new Map() };

  const codeFiles = getFilesRecursive(projectDir)
    .filter((f) => CODE_EXTENSIONS.has(extname(f)));

  for (const absPath of codeFiles) {
    const relPath = relative(projectDir, absPath);
    graph.files.push(relPath);

    try {
      const source = readFileSync(absPath, 'utf-8');
      const targets = [];

      for (const spec of extractImports(source)) {
        // Bare specifiers (npm packages, node: builtins) are not graph edges.
        const isLocal = spec.startsWith('.') || spec.startsWith('/');
        if (!isLocal) continue;

        const target = resolveImport(dirname(absPath), spec, projectDir);
        if (target !== null) {
          targets.push(target);
          graph.edges.push({ from: relPath, to: target });
        }
      }

      graph.fileMap.set(relPath, targets);
    } catch {
      // Binary or unreadable file — leave it out of the edge set.
    }
  }

  return graph;
}
|
|
146
|
+
|
|
147
|
+
/**
 * Extract every import specifier from a source string.
 *
 * Recognises three syntaxes: static ES imports (`import x from 'y'`,
 * `import 'y'`), dynamic `import('y')`, and CommonJS `require('y')`.
 * Purely regex-based, so only single-line statements are detected.
 *
 * @param {string} content - Raw source text.
 * @returns {string[]} Specifiers grouped by pattern (static, dynamic, require),
 *   each group in source order.
 */
function extractImports(content) {
  const patterns = [
    /import\s+(?:.*?\s+from\s+)?['"]([^'"]+)['"]/g, // static ES imports
    /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g,         // dynamic import()
    /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g,        // CommonJS require()
  ];

  const found = [];
  for (const pattern of patterns) {
    for (const m of content.matchAll(pattern)) {
      found.push(m[1]);
    }
  }
  return found;
}
|
|
171
|
+
|
|
172
|
+
/**
 * Resolve a local import specifier to an on-disk file.
 *
 * Lookup order mirrors Node-style resolution: specifier + known extension,
 * then the literal path (specifier already carries an extension), then an
 * index file inside the specifier directory.
 *
 * @param {string} fromDir - Absolute directory of the importing file.
 * @param {string} importPath - Raw specifier, e.g. './util'.
 * @param {string} projectDir - Project root; the result is relative to this.
 * @returns {string|null} Project-relative path of the target, or null when
 *   nothing on disk matches.
 */
function resolveImport(fromDir, importPath, projectDir) {
  const EXTS = ['.ts', '.tsx', '.js', '.mjs', '.jsx', '.cjs'];
  const base = resolve(fromDir, importPath);

  const candidates = [
    ...EXTS.map((ext) => base + ext),                 // ./util → ./util.ts, …
    base,                                             // ./util.js → exact file
    ...EXTS.map((ext) => join(base, `index${ext}`)),  // ./util → ./util/index.ts, …
  ];

  for (const candidate of candidates) {
    if (existsSync(candidate)) {
      return relative(projectDir, candidate);
    }
  }
  return null;
}
|
|
200
|
+
|
|
201
|
+
// ── Circular Dependency Detection ───────────────────────────────────────────
|
|
202
|
+
|
|
203
|
+
/**
 * Find circular import chains in the graph via depth-first search.
 *
 * Each reported cycle is closed (first node repeated at the end), e.g.
 * ['a', 'b', 'a']. Only cycles of 2–5 distinct files are reported to keep
 * the output readable, and the same cycle discovered from different entry
 * points is collapsed to a single occurrence.
 *
 * @param {{files: string[], fileMap: Map<string, string[]>}} graph
 * @returns {string[][]} Deduplicated list of closed cycles.
 */
function detectCircularDeps(graph) {
  const found = [];
  const done = new Set();   // fully explored nodes
  const active = new Set(); // nodes on the current DFS stack

  const walk = (node, trail) => {
    if (active.has(node)) {
      // Back-edge: the cycle is the trail suffix starting at `node`.
      const start = trail.indexOf(node);
      if (start >= 0) {
        const cycle = trail.slice(start).concat(node);
        // 3..6 entries = 2..5 distinct files once the closing repeat counts.
        if (cycle.length >= 3 && cycle.length <= 6) {
          found.push(cycle);
        }
      }
      return;
    }
    if (done.has(node)) return;

    done.add(node);
    active.add(node);
    for (const next of graph.fileMap.get(node) || []) {
      walk(next, trail.concat(node));
    }
    active.delete(node);
  };

  for (const file of graph.files) {
    if (!done.has(file)) walk(file, []);
  }

  // Key each cycle on its sorted member set to drop duplicates.
  const keys = new Set();
  return found.filter((cycle) => {
    const key = [...cycle].sort().join('|');
    if (keys.has(key)) return false;
    keys.add(key);
    return true;
  });
}
|
|
250
|
+
|
|
251
|
+
// ── Layer Boundary Parser ───────────────────────────────────────────────────
|
|
252
|
+
|
|
253
|
+
/**
 * Parse the "Layer Boundaries" markdown table out of ARCHITECTURE.md.
 *
 * Expected row shape: `| layer | can-import list | cannot-import list |`
 * where the lists are comma-separated. Scanning starts at the first line
 * mentioning "Layer Boundaries" and stops at the next `## ` heading.
 * Header/separator rows and markdown noise rows are skipped.
 *
 * Fix: removed an unused `tableRegex` variable that was declared but never
 * executed (dead code).
 *
 * @param {string} archContent - Full ARCHITECTURE.md text.
 * @returns {{name: string, canImport: string[], cannotImport: string[]}[]}
 *   Layer names and lists are lowercased with backticks/asterisks stripped.
 */
function parseLayerBoundaries(archContent) {
  const layers = [];
  const ROW = /\|\s*(\S+)\s*\|\s*([^|]+)\s*\|\s*([^|]+)\s*\|/;
  // Normalise a table cell: trim, strip markdown emphasis/backticks, lowercase.
  const cell = (s) => s.trim().replace(/[`*]/g, '').toLowerCase();

  let inBoundarySection = false;
  for (const line of archContent.split('\n')) {
    if (line.includes('Layer Boundaries') || line.includes('layer boundaries')) {
      inBoundarySection = true;
      continue;
    }
    if (inBoundarySection && line.startsWith('## ')) {
      break; // next section
    }
    if (!inBoundarySection) continue;
    // Skip the separator row and the header row.
    if (line.includes('---') || (line.includes('Layer') && line.includes('Can Import'))) continue;

    const match = line.match(ROW);
    if (!match) continue;

    const layerName = cell(match[1]);
    // Skip markdown noise (comments, stray "layer" headers, single chars).
    if (layerName === '<!--' || layerName === 'layer' || layerName.length < 2) continue;

    layers.push({
      name: layerName,
      canImport: match[2].trim().split(',').map(cell),
      cannotImport: match[3].trim().split(',').map(cell),
    });
  }

  return layers;
}
|
|
288
|
+
|
|
289
|
+
/**
 * Check every import edge in the graph against the layer rules declared in
 * ARCHITECTURE.md. Cross-layer edges count toward the score: forbidden ones
 * become errors, permitted ones count as passed. Same-layer edges and edges
 * touching unclassified files are ignored.
 *
 * @param {string} projectDir - Project root (unused here; kept for signature parity).
 * @param {{edges: {from: string, to: string}[]}} graph - Import graph.
 * @param {{name: string, cannotImport: string[]}[]} declaredLayers - Parsed rules.
 * @param {object} results - Mutable accumulator (errors/passed/total updated).
 */
function validateLayerBoundaries(projectDir, graph, declaredLayers, results) {
  // Index directory patterns (e.g. 'src/routes') back to their layer.
  const layerDirMap = new Map();
  for (const layer of declaredLayers) {
    for (const pattern of getLayerDirPatterns(layer.name)) {
      layerDirMap.set(pattern, layer);
    }
  }

  for (const { from, to } of graph.edges) {
    const source = getFileLayer(from, layerDirMap);
    const target = getFileLayer(to, layerDirMap);

    // Skip edges we can't classify, and edges inside a single layer.
    if (source === null || target === null) continue;
    if (source.name === target.name) continue;

    // Fuzzy match: a cannot-import entry and the target layer name only need
    // to contain one another (tolerates plural/singular variants).
    const forbidden = source.cannotImport.some(
      (entry) => entry.includes(target.name) || target.name.includes(entry)
    );

    results.total++;
    if (forbidden) {
      results.errors.push(
        `${from}: ${source.name} → ${target.name} (forbidden by ARCHITECTURE.md)`
      );
    } else {
      results.passed++;
    }
  }
}
|
|
319
|
+
|
|
320
|
+
/**
 * Map a declared layer name to the directory patterns it may live under.
 * Combines the cleaned name itself, its `src/` variant, and a table of
 * well-known aliases (e.g. `routes` also matches `src/app/api`).
 *
 * Fixes:
 * - results are deduplicated — the alias table repeats the standard
 *   patterns, so the original could return the same pattern twice;
 * - the alias lookup is prototype-safe — `aliases[clean]` for a name like
 *   'constructor' previously hit Object.prototype and threw on spread.
 *
 * @param {string} layerName - Layer name from ARCHITECTURE.md (slashes and
 *   whitespace are stripped, result lowercased).
 * @returns {string[]} Unique directory patterns, standard patterns first.
 */
function getLayerDirPatterns(layerName) {
  const clean = layerName.replace(/\//g, '').replace(/\s/g, '').toLowerCase();

  // Common aliases for conventional project layouts.
  const aliases = {
    routes: ['routes', 'src/routes', 'src/app/api', 'api', 'handlers'],
    handlers: ['routes', 'src/routes', 'handlers'],
    services: ['services', 'src/services', 'src/lib'],
    models: ['models', 'src/models', 'entities', 'schema'],
    repositories: ['repositories', 'src/repositories', 'models', 'data'],
    middleware: ['middleware', 'src/middleware'],
    utils: ['utils', 'src/utils', 'helpers', 'src/helpers', 'lib'],
    components: ['components', 'src/components', 'ui'],
  };

  const extra = Object.hasOwn(aliases, clean) ? aliases[clean] : [];
  const patterns = new Set([clean, `src/${clean}`, ...extra]);
  return [...patterns];
}
|
|
346
|
+
|
|
347
|
+
/**
 * Classify a project-relative file path into a declared layer.
 * A file belongs to a layer when its path starts with one of the layer's
 * directory patterns or contains that pattern as an intermediate segment.
 *
 * @param {string} filePath - Project-relative path (forward slashes).
 * @param {Map<string, object>} layerDirMap - Pattern → layer lookup.
 * @returns {object|null} The matching layer, or null when unclassified.
 */
function getFileLayer(filePath, layerDirMap) {
  for (const [pattern, layer] of layerDirMap) {
    const atRoot = filePath.startsWith(`${pattern}/`);
    const nested = filePath.includes(`/${pattern}/`);
    if (atRoot || nested) {
      return layer;
    }
  }
  return null;
}
|
|
355
|
+
|
|
356
|
+
// ── Utilities ───────────────────────────────────────────────────────────────
|
|
357
|
+
|
|
358
|
+
/**
 * Recursively collect every file path under `dir`, pruning ignored
 * directories (node_modules, build output, docs dirs, …) and dot-entries.
 * Unreadable directories and entries are silently skipped.
 *
 * @param {string} dir - Absolute directory to walk.
 * @returns {string[]} Absolute file paths.
 */
function getFilesRecursive(dir) {
  const collected = [];
  if (!existsSync(dir)) return collected;

  let entries = [];
  try {
    entries = readdirSync(dir);
  } catch {
    return collected; // permission error — treat as empty
  }

  for (const entry of entries) {
    if (entry.startsWith('.') || IGNORE_DIRS.has(entry)) continue;

    const fullPath = join(dir, entry);
    try {
      if (statSync(fullPath).isDirectory()) {
        collected.push(...getFilesRecursive(fullPath));
      } else {
        collected.push(fullPath);
      }
    } catch {
      // Broken symlink or unreadable entry — ignore it.
    }
  }
  return collected;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Changelog Validator — Checks CHANGELOG.md has [Unreleased] section
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
6
|
+
import { resolve } from 'node:path';
|
|
7
|
+
|
|
8
|
+
/**
 * Changelog validator: checks that CHANGELOG.md keeps an [Unreleased]
 * section and uses Keep-a-Changelog style `## [version]` headings.
 * Both findings are warnings, never errors.
 *
 * Fix: a config without `requiredFiles.changelog` no longer throws a
 * TypeError — it is treated the same as a missing changelog file.
 *
 * @param {string} projectDir - Project root.
 * @param {object} config - DocGuard config; `requiredFiles.changelog` is the
 *   project-relative changelog path.
 * @returns {{name: string, errors: string[], warnings: string[], passed: number, total: number}}
 */
export function validateChangelog(projectDir, config) {
  const results = { name: 'changelog', errors: [], warnings: [], passed: 0, total: 0 };

  // Missing config entry or missing file: the structure validator owns the
  // "file absent" report, so bail out without recording any checks.
  const changelogRel = config.requiredFiles?.changelog;
  if (!changelogRel) return results;

  const changelogPath = resolve(projectDir, changelogRel);
  if (!existsSync(changelogPath)) return results;

  const content = readFileSync(changelogPath, 'utf-8');

  // Check 1: an [Unreleased] section for in-flight changes.
  results.total++;
  if (content.includes('[Unreleased]') || content.includes('[unreleased]')) {
    results.passed++;
  } else {
    results.warnings.push('CHANGELOG.md: missing [Unreleased] section');
  }

  // Check 2: at least one `## [version]` heading (Keep a Changelog format).
  results.total++;
  if (/^## \[/m.test(content)) {
    results.passed++;
  } else {
    results.warnings.push(
      'CHANGELOG.md: no version sections found (expected ## [version] format)'
    );
  }

  return results;
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Docs-Sync Validator — Checks that source files have matching canonical doc entries
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
|
|
6
|
+
import { resolve, join, extname, basename } from 'node:path';
|
|
7
|
+
|
|
8
|
+
const IGNORE_DIRS = new Set([
|
|
9
|
+
'node_modules', '.git', '.next', 'dist', 'build',
|
|
10
|
+
'coverage', '.cache', '__pycache__', '.venv', 'vendor',
|
|
11
|
+
]);
|
|
12
|
+
|
|
13
|
+
/**
 * Docs-sync validator: every route/API/service source file should be
 * mentioned (by project-relative path or by basename) somewhere in the
 * canonical docs under docs-canonical/. Unreferenced files become warnings.
 *
 * Fix: the two near-identical scan loops (route dirs vs. service dirs) are
 * folded into one private helper; behavior and messages are unchanged.
 *
 * @param {string} projectDir - Project root.
 * @param {object} config - DocGuard config (currently unused here).
 * @returns {{name: string, errors: string[], warnings: string[], passed: number, total: number}}
 */
export function validateDocsSync(projectDir, config) {
  const results = { name: 'docs-sync', errors: [], warnings: [], passed: 0, total: 0 };

  // Concatenate all canonical markdown into one searchable haystack.
  const canonicalDir = resolve(projectDir, 'docs-canonical');
  let canonicalContent = '';
  if (existsSync(canonicalDir)) {
    try {
      for (const f of readdirSync(canonicalDir).filter((f) => f.endsWith('.md'))) {
        canonicalContent += readFileSync(resolve(canonicalDir, f), 'utf-8') + '\n';
      }
    } catch {
      // Unreadable docs dir — continue with whatever was read so far.
    }
  }

  if (!canonicalContent) {
    return results; // No canonical docs to check against
  }

  // Route/API handler directories, each with its warning label.
  const routePatterns = [
    { dir: 'src/routes', label: 'route' },
    { dir: 'src/app/api', label: 'API route' },
    { dir: 'api', label: 'API route' },
    { dir: 'routes', label: 'route' },
  ];
  for (const { dir, label } of routePatterns) {
    _checkDirDocumented(projectDir, dir, (relPath) => `${label} ${relPath}`, canonicalContent, results);
  }

  // Service/library directories.
  for (const dir of ['src/services', 'services', 'src/lib']) {
    _checkDirDocumented(projectDir, dir, (relPath) => `Service ${relPath}`, canonicalContent, results);
  }

  return results;
}

/**
 * Scan one directory and record a check per code file: passed when the
 * file's relative path or basename appears in the canonical docs, otherwise
 * a warning built from `describe(relPath)`.
 */
function _checkDirDocumented(projectDir, dir, describe, canonicalContent, results) {
  const SOURCE_EXTS = ['.ts', '.js', '.mjs', '.py', '.java', '.go'];
  const targetDir = resolve(projectDir, dir);
  if (!existsSync(targetDir)) return;

  for (const file of getFilesRecursive(targetDir)) {
    const ext = extname(file);
    if (!SOURCE_EXTS.includes(ext)) continue;

    results.total++;
    const relPath = file.replace(projectDir + '/', '');
    const name = basename(file, ext);

    if (canonicalContent.includes(relPath) || canonicalContent.includes(name)) {
      results.passed++;
    } else {
      results.warnings.push(`${describe(relPath)} not referenced in any canonical doc`);
    }
  }
}
|
|
89
|
+
|
|
90
|
+
/**
 * Recursively list all files under `dir`, skipping ignored directories and
 * dot-entries.
 *
 * Fix: `readdirSync` is now guarded — the architecture validator's twin of
 * this helper already tolerated unreadable directories, while this copy
 * threw on a permission error.
 *
 * @param {string} dir - Absolute directory to walk.
 * @returns {string[]} Absolute file paths.
 */
function getFilesRecursive(dir) {
  const results = [];
  if (!existsSync(dir)) return results;

  let entries;
  try {
    entries = readdirSync(dir);
  } catch {
    return results; // permission error — treat as empty
  }

  for (const entry of entries) {
    if (IGNORE_DIRS.has(entry) || entry.startsWith('.')) continue;
    const fullPath = join(dir, entry);
    try {
      const stat = statSync(fullPath);
      if (stat.isDirectory()) {
        results.push(...getFilesRecursive(fullPath));
      } else {
        results.push(fullPath);
      }
    } catch {
      // Broken symlink or unreadable entry — skip it.
    }
  }
  return results;
}
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Drift Validator — Every // DRIFT: comment must have a DRIFT-LOG.md entry
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
|
|
6
|
+
import { resolve, join, extname } from 'node:path';
|
|
7
|
+
|
|
8
|
+
const CODE_EXTENSIONS = new Set([
|
|
9
|
+
'.js', '.mjs', '.cjs', '.ts', '.tsx', '.jsx',
|
|
10
|
+
'.py', '.java', '.go', '.rs', '.swift', '.kt',
|
|
11
|
+
'.rb', '.php', '.cs', '.c', '.cpp', '.h',
|
|
12
|
+
]);
|
|
13
|
+
|
|
14
|
+
const IGNORE_DIRS = new Set([
|
|
15
|
+
'node_modules', '.git', '.next', 'dist', 'build',
|
|
16
|
+
'coverage', '.cache', '__pycache__', '.venv', 'vendor',
|
|
17
|
+
'cli', // Exclude DocGuard's own source (contains DRIFT: in regex patterns)
|
|
18
|
+
]);
|
|
19
|
+
|
|
20
|
+
/**
 * Drift validator: every `DRIFT:` marker comment in source code must have a
 * corresponding entry — matched by file path — in DRIFT-LOG.md.
 *
 * Fix: a config without `requiredFiles.driftLog` no longer throws a
 * TypeError; it is treated the same as a missing DRIFT-LOG.md file.
 *
 * @param {string} projectDir - Project root.
 * @param {object} config - DocGuard config; `requiredFiles.driftLog` is the
 *   project-relative DRIFT-LOG.md path.
 * @returns {{name: string, errors: string[], warnings: string[], passed: number, total: number}}
 */
export function validateDrift(projectDir, config) {
  const results = { name: 'drift', errors: [], warnings: [], passed: 0, total: 0 };

  // Collect every DRIFT comment across the project's code files.
  const driftComments = [];
  walkDir(projectDir, (filePath) => {
    if (!CODE_EXTENSIONS.has(extname(filePath))) return;

    const content = readFileSync(filePath, 'utf-8');
    content.split('\n').forEach((line, i) => {
      // Match common comment leaders: //, #, /*, --
      const match = line.match(/(?:\/\/|#|\/\*|--)\s*DRIFT:\s*(.+)/i);
      if (match) {
        driftComments.push({
          file: filePath.replace(projectDir + '/', ''),
          line: i + 1,
          comment: match[1].trim(),
        });
      }
    });
  });

  // No drift markers at all: report a single implicit pass.
  if (driftComments.length === 0) {
    results.total = 1;
    results.passed = 1;
    return results;
  }

  // Locate DRIFT-LOG.md; a missing config entry counts as a missing log.
  const driftLogRel = config.requiredFiles?.driftLog;
  const driftLogPath = driftLogRel ? resolve(projectDir, driftLogRel) : null;
  if (!driftLogPath || !existsSync(driftLogPath)) {
    results.total = driftComments.length;
    for (const dc of driftComments) {
      results.errors.push(
        `${dc.file}:${dc.line} has DRIFT comment but DRIFT-LOG.md doesn't exist`
      );
    }
    return results;
  }

  const driftLogContent = readFileSync(driftLogPath, 'utf-8');

  // Every marker must be logged — matched loosely by file-path mention.
  for (const dc of driftComments) {
    results.total++;
    if (driftLogContent.includes(dc.file)) {
      results.passed++;
    } else {
      results.errors.push(
        `${dc.file}:${dc.line} — DRIFT comment not logged in DRIFT-LOG.md`
      );
    }
  }

  return results;
}
|
|
80
|
+
|
|
81
|
+
/**
 * Depth-first traversal of a directory tree, invoking `callback` with the
 * absolute path of every regular file. Ignored directories (including
 * DocGuard's own `cli/` source) and dot-entries are pruned.
 * Entries that cannot be stat'ed — and callbacks that throw — are skipped
 * silently, matching the validator's best-effort scanning policy.
 *
 * @param {string} dir - Directory to walk.
 * @param {(filePath: string) => void} callback - Invoked once per file.
 */
function walkDir(dir, callback) {
  if (!existsSync(dir)) return;

  for (const entry of readdirSync(dir)) {
    const skip = IGNORE_DIRS.has(entry) || entry.startsWith('.');
    if (skip) continue;

    const fullPath = join(dir, entry);
    try {
      const info = statSync(fullPath);
      if (info.isDirectory()) {
        walkDir(fullPath, callback);
      } else if (info.isFile()) {
        callback(fullPath);
      }
    } catch {
      // Skip entries we can't read.
    }
  }
}
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Environment Validator — Checks ENVIRONMENT.md docs and .env.example
|
|
3
|
+
* Now respects projectTypeConfig (e.g., skip env checks for CLI tools)
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
7
|
+
import { resolve } from 'node:path';
|
|
8
|
+
|
|
9
|
+
/**
 * Environment validator: checks docs-canonical/ENVIRONMENT.md for setup
 * sections and, when the project type uses environment variables, verifies
 * that a referenced .env.example actually exists and that a present .env
 * file has a matching .env.example template. Project types that opt out via
 * projectTypeConfig (needsEnvExample or needsEnvVars === false) receive a
 * single automatic pass instead of the .env checks.
 *
 * @param {string} projectDir - Project root.
 * @param {object} config - DocGuard config; may carry `projectTypeConfig`.
 * @returns {{name: string, errors: string[], warnings: string[], passed: number, total: number}}
 */
export function validateEnvironment(projectDir, config) {
  const results = { name: 'environment', errors: [], warnings: [], passed: 0, total: 0 };
  const ptc = config.projectTypeConfig || {};

  const envDocPath = resolve(projectDir, 'docs-canonical/ENVIRONMENT.md');
  if (!existsSync(envDocPath)) {
    return results; // Structure validator reports missing required files.
  }

  const content = readFileSync(envDocPath, 'utf-8');

  // Section check: pass when any of the accepted headings is present.
  const checkSection = (headings, warning) => {
    results.total++;
    if (headings.some((h) => content.includes(h))) {
      results.passed++;
    } else {
      results.warnings.push(warning);
    }
  };

  checkSection(
    ['## Prerequisites', '## Setup Steps'],
    'ENVIRONMENT.md: missing "## Prerequisites" or "## Setup Steps" section'
  );
  checkSection(
    ['## Environment Variables', '## Setup Steps'],
    'ENVIRONMENT.md: missing "## Environment Variables" section'
  );

  const needsEnvChecks = ptc.needsEnvExample !== false && ptc.needsEnvVars !== false;
  if (!needsEnvChecks) {
    // CLI/library project — the doc's existence is enough.
    results.total++;
    results.passed++;
    return results;
  }

  // If the doc points readers at .env.example, the file must exist.
  if (content.includes('.env.example')) {
    results.total++;
    if (existsSync(resolve(projectDir, '.env.example'))) {
      results.passed++;
    } else {
      results.warnings.push(
        'ENVIRONMENT.md references .env.example but the file does not exist'
      );
    }
  }

  // A committed .env without an .env.example template leaves newcomers guessing.
  results.total++;
  const hasEnvFile = ['.env', '.env.local', '.env.development'].some((f) =>
    existsSync(resolve(projectDir, f))
  );
  const hasEnvExample = existsSync(resolve(projectDir, '.env.example'));
  if (hasEnvFile && !hasEnvExample) {
    results.warnings.push(
      ".env file exists but no .env.example template — new contributors won't know what vars to set"
    );
  } else {
    results.passed++;
  }

  return results;
}
|