@llm-dev-ops/agentics-cli 2.1.5 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/pipeline/auto-chain.d.ts +190 -0
- package/dist/pipeline/auto-chain.d.ts.map +1 -1
- package/dist/pipeline/auto-chain.js +1571 -72
- package/dist/pipeline/auto-chain.js.map +1 -1
- package/dist/pipeline/phase2/phases/prompt-generator.d.ts.map +1 -1
- package/dist/pipeline/phase2/phases/prompt-generator.js +205 -12
- package/dist/pipeline/phase2/phases/prompt-generator.js.map +1 -1
- package/dist/pipeline/phase2/schemas.d.ts +10 -10
- package/dist/pipeline/phase4/phases/http-server-generator.d.ts +12 -0
- package/dist/pipeline/phase4/phases/http-server-generator.d.ts.map +1 -1
- package/dist/pipeline/phase4/phases/http-server-generator.js +92 -25
- package/dist/pipeline/phase4/phases/http-server-generator.js.map +1 -1
- package/dist/pipeline/phase4-5-pre-render/financial-model.d.ts +51 -0
- package/dist/pipeline/phase4-5-pre-render/financial-model.d.ts.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/financial-model.js +118 -0
- package/dist/pipeline/phase4-5-pre-render/financial-model.js.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/post-render-reconciler.d.ts +53 -0
- package/dist/pipeline/phase4-5-pre-render/post-render-reconciler.d.ts.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/post-render-reconciler.js +130 -0
- package/dist/pipeline/phase4-5-pre-render/post-render-reconciler.js.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/pre-render-coordinator.d.ts +47 -0
- package/dist/pipeline/phase4-5-pre-render/pre-render-coordinator.d.ts.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/pre-render-coordinator.js +105 -0
- package/dist/pipeline/phase4-5-pre-render/pre-render-coordinator.js.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/sector-baselines.d.ts +42 -0
- package/dist/pipeline/phase4-5-pre-render/sector-baselines.d.ts.map +1 -0
- package/dist/pipeline/phase4-5-pre-render/sector-baselines.js +117 -0
- package/dist/pipeline/phase4-5-pre-render/sector-baselines.js.map +1 -0
- package/dist/pipeline/phase5-build/phase5-build-coordinator.d.ts.map +1 -1
- package/dist/pipeline/phase5-build/phase5-build-coordinator.js +44 -0
- package/dist/pipeline/phase5-build/phase5-build-coordinator.js.map +1 -1
- package/dist/pipeline/phase5-build/phases/post-generation-validator.d.ts +75 -0
- package/dist/pipeline/phase5-build/phases/post-generation-validator.d.ts.map +1 -0
- package/dist/pipeline/phase5-build/phases/post-generation-validator.js +1068 -0
- package/dist/pipeline/phase5-build/phases/post-generation-validator.js.map +1 -0
- package/dist/pipeline/phase5-build/types.d.ts +1 -1
- package/dist/pipeline/phase5-build/types.d.ts.map +1 -1
- package/dist/pipeline/types.d.ts +87 -0
- package/dist/pipeline/types.d.ts.map +1 -1
- package/dist/pipeline/types.js +51 -1
- package/dist/pipeline/types.js.map +1 -1
- package/dist/synthesis/consensus-svg.d.ts +19 -0
- package/dist/synthesis/consensus-svg.d.ts.map +1 -0
- package/dist/synthesis/consensus-svg.js +95 -0
- package/dist/synthesis/consensus-svg.js.map +1 -0
- package/dist/synthesis/consensus-tiers.d.ts +99 -0
- package/dist/synthesis/consensus-tiers.d.ts.map +1 -0
- package/dist/synthesis/consensus-tiers.js +285 -0
- package/dist/synthesis/consensus-tiers.js.map +1 -0
- package/dist/synthesis/domain-labor-classifier.d.ts +101 -0
- package/dist/synthesis/domain-labor-classifier.d.ts.map +1 -0
- package/dist/synthesis/domain-labor-classifier.js +312 -0
- package/dist/synthesis/domain-labor-classifier.js.map +1 -0
- package/dist/synthesis/domain-unit-registry.d.ts +59 -0
- package/dist/synthesis/domain-unit-registry.d.ts.map +1 -0
- package/dist/synthesis/domain-unit-registry.js +320 -0
- package/dist/synthesis/domain-unit-registry.js.map +1 -0
- package/dist/synthesis/financial-claim-extractor.d.ts +72 -0
- package/dist/synthesis/financial-claim-extractor.d.ts.map +1 -0
- package/dist/synthesis/financial-claim-extractor.js +382 -0
- package/dist/synthesis/financial-claim-extractor.js.map +1 -0
- package/dist/synthesis/financial-consistency-rules.d.ts +70 -0
- package/dist/synthesis/financial-consistency-rules.d.ts.map +1 -0
- package/dist/synthesis/financial-consistency-rules.js +483 -0
- package/dist/synthesis/financial-consistency-rules.js.map +1 -0
- package/dist/synthesis/financial-consistency-runner.d.ts +73 -0
- package/dist/synthesis/financial-consistency-runner.d.ts.map +1 -0
- package/dist/synthesis/financial-consistency-runner.js +131 -0
- package/dist/synthesis/financial-consistency-runner.js.map +1 -0
- package/dist/synthesis/forbidden-spin-phrases.d.ts +32 -0
- package/dist/synthesis/forbidden-spin-phrases.d.ts.map +1 -0
- package/dist/synthesis/forbidden-spin-phrases.js +84 -0
- package/dist/synthesis/forbidden-spin-phrases.js.map +1 -0
- package/dist/synthesis/phase-gate-thresholds.d.ts +30 -0
- package/dist/synthesis/phase-gate-thresholds.d.ts.map +1 -0
- package/dist/synthesis/phase-gate-thresholds.js +34 -0
- package/dist/synthesis/phase-gate-thresholds.js.map +1 -0
- package/dist/synthesis/prompts/index.d.ts.map +1 -1
- package/dist/synthesis/prompts/index.js +22 -0
- package/dist/synthesis/prompts/index.js.map +1 -1
- package/dist/synthesis/roadmap-dates.d.ts +72 -0
- package/dist/synthesis/roadmap-dates.d.ts.map +1 -0
- package/dist/synthesis/roadmap-dates.js +203 -0
- package/dist/synthesis/roadmap-dates.js.map +1 -0
- package/dist/synthesis/simulation-artifact-generator.d.ts.map +1 -1
- package/dist/synthesis/simulation-artifact-generator.js +135 -1
- package/dist/synthesis/simulation-artifact-generator.js.map +1 -1
- package/dist/synthesis/simulation-renderers.d.ts +105 -2
- package/dist/synthesis/simulation-renderers.d.ts.map +1 -1
- package/dist/synthesis/simulation-renderers.js +1192 -123
- package/dist/synthesis/simulation-renderers.js.map +1 -1
- package/dist/synthesis/unit-economics-loader.d.ts +71 -0
- package/dist/synthesis/unit-economics-loader.d.ts.map +1 -0
- package/dist/synthesis/unit-economics-loader.js +200 -0
- package/dist/synthesis/unit-economics-loader.js.map +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1,1068 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Phase 5 Build — Stage 7: Post-Generation Code Validator
|
|
3
|
+
*
|
|
4
|
+
* Implements ADR-PIPELINE-065 (which is the implementation spec for the
|
|
5
|
+
* Accepted-but-unimplemented ADR-PIPELINE-042 Post-Generation Code Validation).
|
|
6
|
+
*
|
|
7
|
+
* Unlike `implementation-quality-gate.ts` which is a PRE-generation prompt
|
|
8
|
+
* quality gate (it scans Phase 2 artifacts BEFORE code is generated), this
|
|
9
|
+
* validator scans the GENERATED project tree AFTER all other Phase 5 stages
|
|
10
|
+
* have run. It applies 10 rules that enforce the patterns documented in
|
|
11
|
+
* ADR-PIPELINE-046/047/049/051/059/064 and catches regressions in observability,
|
|
12
|
+
* typed API boundaries, audit trails, and test coverage.
|
|
13
|
+
*
|
|
14
|
+
* Non-blocking by default. When AGENTICS_STRICT_POSTGEN=true, any rule at
|
|
15
|
+
* severity 'error' fails the stage and — per coordinator configuration —
|
|
16
|
+
* can block the pipeline.
|
|
17
|
+
*
|
|
18
|
+
* Error codes:
|
|
19
|
+
* ECLI-P5-040 Post-generation validation failed (strict mode)
|
|
20
|
+
* ECLI-P5-041 Generated project directory not found
|
|
21
|
+
*/
|
|
22
|
+
import * as fs from 'node:fs';
|
|
23
|
+
import * as path from 'node:path';
|
|
24
|
+
import { createSpan, endSpan, emitSpan } from '../../phase2/telemetry.js';
|
|
25
|
+
import { OWNED_SCAFFOLD_MODULES } from '../../auto-chain.js';
|
|
26
|
+
// ============================================================================
|
|
27
|
+
// File Walking & Helpers
|
|
28
|
+
// ============================================================================
|
|
29
|
+
const SCAN_EXTENSIONS = new Set(['.ts']);
|
|
30
|
+
const SKIP_DIRS = new Set(['node_modules', 'dist', 'build', '.git', 'coverage', '.next']);
|
|
31
|
+
/**
 * Walk a directory recursively and return a map of relative paths → file contents.
 * Safe: ignores unreadable files, caps file size at 1MB to avoid blowup.
 *
 * Visible behavior:
 * - A missing rootDir yields an empty Map rather than throwing.
 * - Directories named in SKIP_DIRS are never descended into.
 * - Only files whose extension is in SCAN_EXTENSIONS ('.ts') are collected.
 * - Unreadable directories/files are silently skipped.
 */
function loadProjectFiles(rootDir) {
    const files = new Map();
    if (!fs.existsSync(rootDir))
        return files; // missing root → empty map; callers treat as "nothing to scan"
    const walk = (currentDir) => {
        let entries;
        try {
            entries = fs.readdirSync(currentDir, { withFileTypes: true });
        }
        catch {
            return; // unreadable directory: skip its whole subtree
        }
        for (const entry of entries) {
            if (entry.isDirectory()) {
                if (SKIP_DIRS.has(entry.name))
                    continue; // never descend into node_modules/dist/build/etc.
                walk(path.join(currentDir, entry.name));
                continue;
            }
            const ext = path.extname(entry.name);
            if (!SCAN_EXTENSIONS.has(ext))
                continue; // only TypeScript sources are scanned
            const fullPath = path.join(currentDir, entry.name);
            try {
                const stat = fs.statSync(fullPath);
                if (stat.size > 1_000_000)
                    continue; // skip files > 1MB
                const content = fs.readFileSync(fullPath, 'utf-8');
                // Keys are rootDir-relative so rules can match on project layout.
                const relPath = path.relative(rootDir, fullPath);
                files.set(relPath, content);
            }
            catch {
                // skip unreadable
            }
        }
    };
    walk(rootDir);
    return files;
}
|
|
74
|
+
/**
 * Convert a character offset into a 1-based line number by counting
 * newlines that occur strictly before the offset. Offsets past the end
 * of `content` are clamped, so out-of-range offsets report the last line.
 */
function lineAt(content, offset) {
    const limit = Math.min(offset, content.length);
    let lineNumber = 1;
    let newlinePos = content.indexOf('\n');
    while (newlinePos !== -1 && newlinePos < limit) {
        lineNumber += 1;
        newlinePos = content.indexOf('\n', newlinePos + 1);
    }
    return lineNumber;
}
|
|
85
|
+
/**
|
|
86
|
+
* Path predicates used by multiple rules.
|
|
87
|
+
*/
|
|
88
|
+
/** True for *.test.ts / *.spec.ts files, or anything under a tests directory. */
function isTestFile(relPath) {
    const normalized = relPath.replace(/\\/g, '/').toLowerCase();
    const testSuffixes = ['.test.ts', '.spec.ts'];
    if (testSuffixes.some((suffix) => normalized.endsWith(suffix)))
        return true;
    return normalized.includes('/tests/') || normalized.includes('/__tests__/');
}
|
|
92
|
+
/** True for helper scripts and demo entry points (exempt from production rules). */
function isScriptOrDemo(relPath) {
    const normalized = relPath.replace(/\\/g, '/').toLowerCase();
    // The `/demo` substring check also covers paths ending in `/demo.ts`.
    return normalized.includes('/scripts/') || normalized.includes('/demo');
}
|
|
96
|
+
/** True for HTTP server / routing files (app entry points included). */
function isServerFile(relPath) {
    const normalized = relPath.replace(/\\/g, '/').toLowerCase();
    const dirMarkers = ['/server/', '/routes/'];
    const entryPoints = ['/app.ts', '/api.ts'];
    return (dirMarkers.some((marker) => normalized.includes(marker))
        || entryPoints.some((suffix) => normalized.endsWith(suffix)));
}
|
|
100
|
+
/** True for application/domain service files. */
function isServiceFile(relPath) {
    const normalized = relPath.replace(/\\/g, '/').toLowerCase();
    for (const marker of ['/services/', '/application/', '/domain/services/']) {
        if (normalized.includes(marker))
            return true;
    }
    return false;
}
|
|
104
|
+
/** True for files declaring the project's public DTO / contract types. */
function isPublicTypesFile(relPath) {
    const normalized = relPath.replace(/\\/g, '/').toLowerCase();
    if (normalized.endsWith('/types.ts'))
        return true;
    return normalized.includes('/domain/types/') || normalized.includes('/contracts/');
}
|
|
108
|
+
/**
 * Run `pattern` over `content` and build one finding per match, tagged
 * with rule id, severity, message, and the 1-based line of the match.
 *
 * A fresh RegExp is compiled on every call so the caller's regex object
 * never has its `lastIndex` mutated, and the `g` flag is forced so that
 * exec() advances through the whole content.
 */
function collectMatches(content, pattern, filePath, ruleId, severity, message) {
    const flags = pattern.flags.includes('g') ? pattern.flags : `${pattern.flags}g`;
    const scanner = new RegExp(pattern.source, flags);
    const findings = [];
    for (let hit = scanner.exec(content); hit !== null; hit = scanner.exec(content)) {
        findings.push({
            ruleId,
            filePath,
            line: lineAt(content, hit.index),
            message,
            severity,
        });
        // A zero-width match would otherwise loop forever.
        if (hit.index === scanner.lastIndex)
            scanner.lastIndex += 1;
    }
    return findings;
}
|
|
128
|
+
// ============================================================================
|
|
129
|
+
// Rule Definitions — 10 rules per ADR-PIPELINE-065
|
|
130
|
+
// ============================================================================
|
|
131
|
+
/** PGV-001: route-handler inputs must stay typed — no `as string` / `as any` casts. */
const RULE_PGV_001 = {
    id: 'PGV-001',
    title: 'No `as string` / `as any` casts on route handler context or request',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-064',
    check: (files) => {
        // Table of unsafe-cast patterns, each paired with its remediation message.
        const castChecks = [
            {
                // Hono: c.get('x') as string
                pattern: /c\.get\(\s*['"][\w.-]+['"]\s*\)\s*as\s+(?:string|any|unknown|Record<[^>]+>)/g,
                message: 'Cast `as string` on Hono `c.get()` — use typed `Hono<{ Variables: ContextVariables }>` generic (ADR-PIPELINE-064).',
            },
            {
                // Express: (req as any).x
                pattern: /\(\s*req\s+as\s+any\s*\)/g,
                message: 'Cast `(req as any)` — declare `declare global { namespace Express { interface Request { ... } } }` (ADR-PIPELINE-064).',
            },
            {
                // Express: req.headers['x-foo'] as string
                pattern: /req\.headers\[['"][^'"]+['"]\]\s*as\s+(?:string|any)/g,
                message: 'Cast `req.headers[\'x-foo\'] as string` — use middleware to parse headers into typed request properties (ADR-PIPELINE-064).',
            },
        ];
        const findings = [];
        for (const [filePath, content] of files) {
            // Only production server/route files are in scope.
            if (isTestFile(filePath) || !isServerFile(filePath))
                continue;
            for (const { pattern, message } of castChecks) {
                findings.push(...collectMatches(content, pattern, filePath, 'PGV-001', 'error', message));
            }
        }
        return findings;
    },
};
|
|
154
|
+
/** PGV-002: every `new Hono()` instantiation must carry a Variables generic. */
const RULE_PGV_002 = {
    id: 'PGV-002',
    title: '`new Hono()` must be parameterized with Variables generic',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-064',
    check: (files) => {
        // An untyped instantiation reads `new Hono(` with no generic argument;
        // a typed one reads `new Hono<...>(` so `Hono` is followed by `<`, not
        // `(`, and never matches this pattern.
        const untypedHono = /new\s+Hono\s*\((?!\s*<)/g;
        const message = '`new Hono()` without type parameter — use `new Hono<{ Variables: ContextVariables }>()` so `c.get()` returns typed values (ADR-PIPELINE-064).';
        const results = [];
        for (const [filePath, content] of files) {
            const outOfScope = isTestFile(filePath) || !isServerFile(filePath);
            if (outOfScope)
                continue;
            results.push(...collectMatches(content, untypedHono, filePath, 'PGV-002', 'error', message));
        }
        return results;
    },
};
|
|
172
|
+
/** PGV-003: No hardcoded magic numbers in service files. */
const RULE_PGV_003 = {
    id: 'PGV-003',
    title: 'No hardcoded magic numbers in service files',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-046',
    check: (files) => {
        const findings = [];
        // Numbers with 4+ digits appearing as a standalone literal in expressions.
        // The lookarounds reject digits adjacent to word chars, `$`, `.` or
        // quotes (identifiers, member access, quoted digits).
        // Skip: year constants (19xx/20xx), port numbers inside env fallbacks,
        // Zod default() calls, and values inside string literals.
        const pattern = /(?<![\w$.'"`])(\d{4,})(?![\w$'"`])/g;
        for (const [filePath, content] of files) {
            if (isTestFile(filePath) || !isServiceFile(filePath))
                continue;
            let match;
            const re = new RegExp(pattern.source, pattern.flags);
            while ((match = re.exec(content)) !== null) {
                const num = parseInt(match[1], 10);
                // Skip year literals
                if (num >= 1900 && num <= 2100)
                    continue;
                // Skip common non-magic numbers (bit widths, HTTP status, etc.)
                if ([1024, 2048, 4096, 8192, 16384, 32768, 65536].includes(num))
                    continue;
                if (num >= 100 && num <= 599)
                    continue; // HTTP status-like
                // Skip if the number is the argument of a .default(...) call.
                // BUGFIX: the anchored test must run against the text ending AT
                // the number — previously the window extended 10 chars past
                // match.index, so `$` could never sit right after `.default(`
                // (the digits intervened) and this skip never fired.
                const windowStart = Math.max(0, match.index - 40);
                const prefix = content.slice(windowStart, match.index);
                if (/\.default\s*\(\s*$/.test(prefix))
                    continue;
                // Zod coercion context may legitimately surround a literal.
                const window = content.slice(windowStart, match.index + 10);
                if (/z\.coerce\.(number|int)\(\)/.test(window))
                    continue;
                // Skip if inside a string literal (crude heuristic: an odd
                // count of unescaped quotes before the number on its line).
                const lineStart = content.lastIndexOf('\n', match.index) + 1;
                const lineEnd = content.indexOf('\n', match.index);
                const lineText = content.slice(lineStart, lineEnd === -1 ? content.length : lineEnd);
                const numPosInLine = match.index - lineStart;
                const before = lineText.slice(0, numPosInLine);
                const quoteCount = (before.match(/(?<!\\)['"`]/g) ?? []).length;
                if (quoteCount % 2 === 1)
                    continue; // inside a string
                // Skip `//` comments and block-comment continuation lines
                if (/^\s*\/\//.test(lineText) || /^\s*\*/.test(lineText))
                    continue;
                findings.push({
                    ruleId: 'PGV-003',
                    filePath,
                    line: lineAt(content, match.index),
                    severity: 'warn',
                    message: `Magic number ${num} in service file — externalize via config or typed constant (ADR-PIPELINE-046).`,
                });
            }
        }
        return findings;
    },
};
|
|
230
|
+
/** PGV-004: repository writes must be accompanied by audit-trail calls. */
const RULE_PGV_004 = {
    id: 'PGV-004',
    title: 'State-mutating service methods must call audit.append / audit.record',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-049',
    check: (files) => {
        // A "mutation" is any repository write; an "audit" is a call on a
        // recognizably audit-named object. A service file that writes but
        // never audits gets exactly one file-level finding (line 0).
        const writesState = (src) => /\b(?:repo|repository)\.(save|update|delete|insert|upsert)\s*\(/.test(src);
        const recordsAudit = (src) => /\b(?:audit|auditService|auditLog)\.(?:append|record|log|write)\s*\(/.test(src);
        const findings = [];
        for (const [filePath, content] of files) {
            if (isTestFile(filePath))
                continue;
            if (!isServiceFile(filePath))
                continue;
            if (writesState(content) && !recordsAudit(content)) {
                findings.push({
                    ruleId: 'PGV-004',
                    filePath,
                    line: 0,
                    severity: 'warn',
                    message: 'File contains repository write operations but no audit.append()/record() calls. State mutations must be auditable (ADR-PIPELINE-049).',
                });
            }
        }
        return findings;
    },
};
|
|
258
|
+
/** PGV-005: production code logs through createLogger(), never console.*. */
const RULE_PGV_005 = {
    id: 'PGV-005',
    title: 'Use createLogger() instead of console.* in production code',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-051',
    check: (files) => {
        const consoleCall = /console\.(log|error|warn|info|debug)\s*\(/g;
        const message = 'Direct `console.*` call — use `createLogger()` for structured logging (ADR-PIPELINE-051).';
        const out = [];
        for (const [filePath, content] of files) {
            // Tests, scripts and demos are allowed to log directly.
            const exempt = isTestFile(filePath) || isScriptOrDemo(filePath);
            if (exempt)
                continue;
            out.push(...collectMatches(content, consoleCall, filePath, 'PGV-005', 'warn', message));
        }
        return out;
    },
};
|
|
275
|
+
/** PGV-006: Every route file has a sibling test file. */
const RULE_PGV_006 = {
    id: 'PGV-006',
    title: 'Route files must have a corresponding .test.ts',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-059',
    check: (files) => {
        const findings = [];
        const allPaths = new Set(files.keys());
        for (const filePath of files.keys()) {
            const n = filePath.replace(/\\/g, '/').toLowerCase();
            const isRoute = n.includes('/routes/') && n.endsWith('.ts') && !isTestFile(filePath);
            if (!isRoute)
                continue;
            // Check sibling test file (exact path match, original casing)
            const base = filePath.replace(/\.ts$/i, '');
            const candidates = [`${base}.test.ts`, `${base}.spec.ts`];
            const hasTest = candidates.some(c => allPaths.has(c));
            if (hasTest)
                continue;
            // Also allow a test file in ../tests/ with the same base name.
            // BUGFIX: `pn` below is lowercased, so the base name must be
            // lowercased too — otherwise mixed-case route files (e.g.
            // `UserRoutes.ts`) could never loose-match their test files.
            const baseName = path.basename(base).toLowerCase();
            const looseMatch = Array.from(allPaths).some(p => {
                const pn = p.replace(/\\/g, '/').toLowerCase();
                return isTestFile(p) && pn.includes(`/${baseName}.`);
            });
            if (looseMatch)
                continue;
            findings.push({
                ruleId: 'PGV-006',
                filePath,
                line: 0,
                severity: 'warn',
                message: `Route file has no corresponding .test.ts (ADR-PIPELINE-059). Expected one of: ${candidates.join(', ')}`,
            });
        }
        return findings;
    },
};
|
|
314
|
+
/** PGV-007: seed-data files must validate their payloads via Zod at load. */
const RULE_PGV_007 = {
    id: 'PGV-007',
    title: 'Seed data must validate via Zod at load',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-046',
    check: (files) => {
        const findings = [];
        const zodValidation = /\.\s*(?:safeParse|parse)\s*\(/;
        for (const [filePath, content] of files) {
            const normalized = filePath.replace(/\\/g, '/').toLowerCase();
            const isSeed = normalized.endsWith('/seed.ts') || normalized.endsWith('/data/seed.ts');
            if (!isSeed)
                continue;
            if (isTestFile(filePath))
                continue;
            // Any .parse()/.safeParse() call counts as validation.
            if (zodValidation.test(content))
                continue;
            findings.push({
                ruleId: 'PGV-007',
                filePath,
                line: 0,
                severity: 'warn',
                message: 'Seed data file does not call .parse()/.safeParse() — exported arrays should be validated at load (ADR-PIPELINE-046).',
            });
        }
        return findings;
    },
};
|
|
341
|
+
/** PGV-008: public DTO types must be concrete — no Record<string, unknown|any>. */
const RULE_PGV_008 = {
    id: 'PGV-008',
    title: 'No Record<string, unknown|any> in public DTO types',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-039',
    check: (files) => {
        const loosePattern = /Record\s*<\s*string\s*,\s*(?:unknown|any)\s*>/g;
        const findings = [];
        for (const [filePath, content] of files) {
            // Only public type/contract files are held to this standard.
            if (isTestFile(filePath))
                continue;
            if (!isPublicTypesFile(filePath))
                continue;
            findings.push(...collectMatches(content, loosePattern, filePath, 'PGV-008', 'warn', '`Record<string, unknown>` in public type — replace with concrete interface derived from the DDD model (ADR-PIPELINE-039).'));
        }
        return findings;
    },
};
|
|
358
|
+
/** PGV-009: Circuit breaker has state transition tests. */
const RULE_PGV_009 = {
    id: 'PGV-009',
    title: 'Circuit breaker must have state transition tests',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-050',
    check: (files) => {
        const findings = [];
        // Locate non-test circuit-breaker implementations by basename.
        const cbFiles = [];
        for (const filePath of files.keys()) {
            const n = filePath.replace(/\\/g, '/').toLowerCase();
            if ((n.endsWith('/circuit-breaker.ts') || n.endsWith('/circuitbreaker.ts')) && !isTestFile(filePath)) {
                cbFiles.push(filePath);
            }
        }
        if (cbFiles.length === 0)
            return findings; // no circuit breaker in the project — rule is vacuous
        // Look for ANY test file that references all 3 state strings
        const allContent = Array.from(files.entries()).filter(([p]) => isTestFile(p));
        // Match state names with word boundaries. The rule only fires when a
        // circuit-breaker.ts file exists, so false positives on generic "open"
        // are unlikely — we also require the distinctive "half-open" compound.
        // NOTE(review): `\bopen\b` also matches inside "half-open" (the hyphen
        // is a word boundary), so a test mentioning only "closed" and
        // "half-open" would satisfy all three checks — confirm this slack is
        // acceptable for the heuristic.
        const hasTransitionTests = allContent.some(([, c]) => /\bclosed\b/.test(c) && /\bopen\b/.test(c) && /\bhalf[-_]?open\b/.test(c));
        if (hasTransitionTests)
            return findings;
        // One file-level finding (line 0) per untested implementation.
        for (const filePath of cbFiles) {
            findings.push({
                ruleId: 'PGV-009',
                filePath,
                line: 0,
                severity: 'warn',
                message: 'Circuit breaker implementation has no state transition tests (closed → open → half-open → closed). Add tests referencing all three states (ADR-PIPELINE-050).',
            });
        }
        return findings;
    },
};
|
|
395
|
+
/** PGV-010: Repository interfaces have at least one implementation. */
const RULE_PGV_010 = {
    id: 'PGV-010',
    title: 'Repository interfaces must have at least one implementation',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-047',
    check: (files) => {
        const findings = [];
        // Pass 1: collect every exported `interface XxxRepository` with the
        // file/line where it is declared. If the same name is declared twice,
        // the later declaration wins (Map.set overwrites).
        const interfacePattern = /export\s+interface\s+(\w*Repository)\b/g;
        const interfaces = new Map();
        for (const [filePath, content] of files) {
            if (isTestFile(filePath))
                continue;
            let match;
            // Fresh regex per file so lastIndex never leaks between files.
            const re = new RegExp(interfacePattern.source, interfacePattern.flags);
            while ((match = re.exec(content)) !== null) {
                interfaces.set(match[1], { filePath, line: lineAt(content, match.index) });
            }
        }
        // Pass 2: for each collected interface, scan all non-test files for a
        // class that `implements` it. O(interfaces × files) — acceptable since
        // both counts are small at project scale.
        for (const [name, loc] of interfaces) {
            // Search for `implements Name` or `class X implements ... Name`
            // (`name` is \w-only by construction, so interpolation is regex-safe).
            const implRegex = new RegExp(`\\bimplements\\b[^{]*\\b${name}\\b`);
            let found = false;
            for (const [p, c] of files) {
                if (isTestFile(p))
                    continue;
                if (implRegex.test(c)) {
                    found = true;
                    break;
                }
            }
            if (found)
                continue;
            // Finding points at the interface declaration itself.
            findings.push({
                ruleId: 'PGV-010',
                filePath: loc.filePath,
                line: loc.line,
                severity: 'warn',
                message: `Repository interface \`${name}\` has no class implementing it. Provide at least one concrete implementation (ADR-PIPELINE-047).`,
            });
        }
        return findings;
    },
};
|
|
439
|
+
/** PGV-011: No CommonJS `require(` calls in generated TypeScript (ADR-PIPELINE-068). */
const RULE_PGV_011 = {
    id: 'PGV-011',
    title: 'No CommonJS require() calls in generated TypeScript',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-068',
    check: (files) => {
        const findings = [];
        // Match `require('...')` / `require("...")` / `require(\`...\`)` —
        // a CJS module load always passes a string literal as its first arg.
        // Constraints:
        // - Char before `require` must NOT be `.` (excludes obj.require)
        //   or word/$ (excludes requireSimulationLineage, _require, etc.)
        // - First argument must be a string literal (excludes method
        //   definitions like `require(name: string)`)
        // - `createRequire(import.meta.url)` is the one safe interop pattern
        //   and is allowed when the file imports from node:module.
        const pattern = /(?:^|[^.\w$])(require)\s*\(\s*['"`]/g;
        const createRequirePattern = /createRequire\s*\(/;
        for (const [filePath, content] of files) {
            // Skip tests, scripts, and generated demo files. Demo is covered by
            // ADR-068's banner rule — not by the require() ban.
            if (isTestFile(filePath) || isScriptOrDemo(filePath))
                continue;
            // Type-only declaration files have no runtime behavior.
            if (filePath.endsWith('.d.ts'))
                continue;
            // NOTE(review): this is a file-wide switch — one createRequire()
            // anywhere exempts EVERY require() call in the file. The check is
            // applied per match below rather than skipping the file up front.
            const hasCreateRequire = createRequirePattern.test(content);
            const re = new RegExp(pattern.source, 'g');
            let match;
            while ((match = re.exec(content)) !== null) {
                // `createRequire(import.meta.url)` produces a `require` binding
                // that's legal in ESM — don't flag its call sites when the file
                // contains the createRequire import.
                if (hasCreateRequire)
                    continue;
                // Skip matches inside comments or string literals (rough check).
                // NOTE(review): the offset math below assumes `require` is
                // followed directly by `(` — extra whitespace between them
                // shifts `start` forward slightly, which only matters if it
                // crosses a newline; confirm this slack is acceptable.
                const start = match.index + (match[0].length - 'require('.length);
                const lineStart = content.lastIndexOf('\n', start) + 1;
                const prefix = content.slice(lineStart, start);
                if (/^\s*\/\//.test(prefix) || /^\s*\*/.test(prefix))
                    continue;
                findings.push({
                    ruleId: 'PGV-011',
                    filePath,
                    line: lineAt(content, start),
                    severity: 'error',
                    message: 'CommonJS `require()` in generated ESM project — use `import` instead, or `createRequire(import.meta.url)` for CJS interop. Silent `require()` throws at runtime under "type":"module" and severs simulation lineage (ADR-PIPELINE-068).',
                });
                // Guard against zero-width loops (defensive; the pattern always
                // consumes at least one character).
                if (match.index === re.lastIndex)
                    re.lastIndex++;
            }
        }
        return findings;
    },
};
|
|
495
|
+
/** PGV-012: No generator-emitted file may redeclare a scaffold-owned export (ADR-PIPELINE-069). */
const RULE_PGV_012 = {
    id: 'PGV-012',
    title: 'Generator files must not redeclare scaffold-owned exports',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-069',
    check: (files) => {
        const findings = [];
        // Build (exportName -> ownedPath) lookup. Skip the scaffold-owned
        // files themselves (matched by basename) since they're allowed to
        // declare their own exports.
        // First writer wins: when two scaffold modules export the same name,
        // findings cite the first module's path.
        const exportToOwned = new Map();
        const ownedBasenames = new Set();
        for (const mod of OWNED_SCAFFOLD_MODULES) {
            ownedBasenames.add(path.basename(mod.path));
            for (const ex of mod.exports) {
                if (!exportToOwned.has(ex))
                    exportToOwned.set(ex, mod.path);
            }
        }
        for (const [filePath, content] of files) {
            const basename = path.basename(filePath);
            // Skip the scaffold-owned files themselves
            if (ownedBasenames.has(basename))
                continue;
            // Skip tests, scripts, demos, and type-only declarations
            if (isTestFile(filePath) || isScriptOrDemo(filePath))
                continue;
            if (filePath.endsWith('.d.ts'))
                continue;
            for (const [exportName, ownedPath] of exportToOwned) {
                // Match `export class|function|const|let|interface|type|enum NAME`
                // NOTE(review): exportName is interpolated into RegExp source —
                // assumes scaffold export names are plain \w identifiers with no
                // regex metacharacters; verify against OWNED_SCAFFOLD_MODULES.
                const declRe = new RegExp(`\\bexport\\s+(?:class|function|const|let|interface|type|enum)\\s+${exportName}\\b`, 'g');
                // Match `export { NAME }` (re-export named) — both `export { NAME }` and
                // `export { NAME as ... }`. The wildcard inside braces tolerates
                // extra exports on the same line.
                const reExportRe = new RegExp(`\\bexport\\s*\\{[^}]*\\b${exportName}\\b[^}]*\\}`, 'g');
                let m;
                while ((m = declRe.exec(content)) !== null) {
                    findings.push({
                        ruleId: 'PGV-012',
                        filePath,
                        line: lineAt(content, m.index),
                        severity: 'error',
                        message: `Redeclaration of scaffold-owned export \`${exportName}\` (owned by ${ownedPath}). Import from the scaffold path instead of redefining (ADR-PIPELINE-069).`,
                    });
                    // Defensive zero-width guard (pattern always consumes chars).
                    if (m.index === declRe.lastIndex)
                        declRe.lastIndex++;
                }
                let r;
                while ((r = reExportRe.exec(content)) !== null) {
                    findings.push({
                        ruleId: 'PGV-012',
                        filePath,
                        line: lineAt(content, r.index),
                        severity: 'error',
                        message: `Re-export of scaffold-owned name \`${exportName}\` from a non-scaffold file (owned by ${ownedPath}). Import directly from the scaffold path (ADR-PIPELINE-069).`,
                    });
                    if (r.index === reExportRe.lastIndex)
                        reExportRe.lastIndex++;
                }
            }
        }
        return findings;
    },
};
|
|
561
|
+
/** PGV-016: Ban module-level correlation-ID mutable state (ADR-PIPELINE-074). */
const RULE_PGV_016 = {
    id: 'PGV-016',
    title: 'No module-level `let currentCorrelationId` in logger / logging files',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-074',
    check: (files) => {
        const findings = [];
        // A `let`/`var` binding named correlation* at column zero (i.e. at
        // module scope) in a logger-shaped file. Bindings inside functions
        // or classes are indented and therefore never match the `^` anchor;
        // only the module-level mutable cross-contaminates concurrent
        // requests.
        const pattern = /^(?:let|var)\s+(\w*[cC]orrelation[A-Za-z0-9_]*)\b/gm;
        for (const [filePath, content] of files) {
            const normalized = filePath.replace(/\\/g, '/').toLowerCase();
            // Scaffolded logger files plus any generator-emitted logging module
            const loggerShaped = normalized.endsWith('/logger.ts') ||
                normalized.includes('/logging/') ||
                normalized.endsWith('/correlation.ts');
            if (!loggerShaped)
                continue;
            if (isTestFile(filePath))
                continue;
            // Fresh RegExp per file so lastIndex state never leaks across files.
            const scanner = new RegExp(pattern.source, 'gm');
            for (let hit = scanner.exec(content); hit !== null; hit = scanner.exec(content)) {
                findings.push({
                    ruleId: 'PGV-016',
                    filePath,
                    line: lineAt(content, hit.index),
                    severity: 'error',
                    message: `Module-level mutable \`${hit[1]}\` in logger/logging file — concurrent requests will cross-contaminate log lines. ` +
                        'Use `runWithCorrelation` + AsyncLocalStorage from the scaffolded `src/logger.ts` instead (ADR-PIPELINE-074).',
                });
                if (hit.index === scanner.lastIndex)
                    scanner.lastIndex += 1;
            }
        }
        return findings;
    },
};
|
|
599
|
+
/** PGV-017: Ban Map-backed service stores (ADR-PIPELINE-075). */
const RULE_PGV_017 = {
    id: 'PGV-017',
    title: 'Service layers must use Repository<T>, not new Map<string, T>',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-075',
    check: (files) => {
        const findings = [];
        // Class-field form only: private/public/protected prefix, optional
        // readonly, optional type annotation, then `= new Map<string, ...`.
        // Method-local variables and request-scoped caches are indented
        // differently inside method bodies yet still match `^\s*` — the real
        // filter is the access-modifier prefix, which only fields carry.
        const fieldPattern = /^\s*(?:private|public|protected)\s+(?:readonly\s+)?\w+\s*(?::\s*[^=]+)?=\s*new\s+Map\s*<\s*string\s*,/gm;
        for (const [filePath, content] of files) {
            if (isTestFile(filePath))
                continue;
            const normalized = filePath.replace(/\\/g, '/').toLowerCase();
            // Scope: only service / application layers
            const inServiceLayer = normalized.includes('/services/') ||
                normalized.includes('/application/') ||
                normalized.includes('/domain/services/');
            if (!inServiceLayer)
                continue;
            // Heuristic: require a *Service / *Store / *Manager class so
            // repositories, controllers, and utility classes that legitimately
            // keep in-memory Maps stay unflagged.
            const classHit = content.match(/\bclass\s+(\w*(?:Service|Store|Manager))\b/);
            if (!classHit)
                continue;
            const className = classHit[1];
            // A declared Repository dependency suggests the Map is probably a
            // request-scoped cache rather than the primary store — soften to warn.
            const dependsOnRepository = /\b(?:private|public|protected|readonly)\s+\w+\s*:\s*\w*Repository\b/.test(content);
            const scanner = new RegExp(fieldPattern.source, 'gm');
            let hit;
            while ((hit = scanner.exec(content)) !== null) {
                findings.push({
                    ruleId: 'PGV-017',
                    filePath,
                    line: lineAt(content, hit.index),
                    severity: dependsOnRepository ? 'warn' : 'error',
                    message: `Service class \`${className}\` stores state in a Map<string, T> field — use \`Repository<T>\` from ` +
                        '`src/persistence/repository.ts` instead. InMemoryRepository for tests, SqliteRepository for production (ADR-PIPELINE-075).',
                });
                if (hit.index === scanner.lastIndex)
                    scanner.lastIndex += 1;
            }
        }
        return findings;
    },
};
|
|
651
|
+
/** PGV-018: /metrics + /health/live + /health/ready routes must exist (ADR-PIPELINE-076). */
const RULE_PGV_018 = {
    id: 'PGV-018',
    title: 'Generated project must expose /metrics, /health/live, /health/ready',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-076',
    check: (files) => {
        const findings = [];
        // Gather candidate server-side files across common layouts
        // (tests excluded).
        const isServerFile = (filePath) => {
            if (isTestFile(filePath))
                return false;
            const n = filePath.replace(/\\/g, '/').toLowerCase();
            return (n.includes('/server/') ||
                n.includes('/api/') ||
                n.endsWith('/app.ts') ||
                n.endsWith('/server.ts') ||
                n.endsWith('/index.ts'));
        };
        const serverFiles = Array.from(files.entries()).filter(([filePath]) => isServerFile(filePath));
        if (serverFiles.length === 0)
            return findings;
        // Path (a): importing createBaseApp from the scaffold satisfies the
        // rule — every generated project inherits the three routes from it.
        const usesBaseApp = serverFiles.some(([, content]) => /\bcreateBaseApp\b/.test(content) &&
            /from\s+['"][^'"]*base-app(\.js)?['"]/.test(content));
        if (usesBaseApp)
            return findings;
        // Path (b): the three routes are declared explicitly somewhere in
        // the server files.
        const combined = serverFiles.map(([, content]) => content).join('\n');
        const missing = [];
        if (!/['"]\/metrics['"]/.test(combined))
            missing.push('/metrics');
        if (!/['"]\/health\/live['"]/.test(combined))
            missing.push('/health/live');
        if (!/['"]\/health\/ready['"]/.test(combined))
            missing.push('/health/ready');
        if (missing.length > 0) {
            // One aggregate finding, pinned to the first server file.
            const [reportFile] = serverFiles[0];
            findings.push({
                ruleId: 'PGV-018',
                filePath: reportFile,
                line: 0,
                severity: 'error',
                message: `Generated project does not expose ${missing.join(', ')}. ` +
                    'Import `createBaseApp` from `src/api/base-app.ts` and call `app.route()` ' +
                    'for your domain routes; the base app wires /metrics, /health/live, and /health/ready automatically (ADR-PIPELINE-076).',
            });
        }
        return findings;
    },
};
|
|
706
|
+
/** PGV-019: createBaseApp readiness list must cover declared dependencies (ADR-PIPELINE-076). */
const RULE_PGV_019 = {
    id: 'PGV-019',
    title: 'createBaseApp readiness list should cover every external dependency',
    severity: 'warn',
    adrReference: 'ADR-PIPELINE-076',
    /**
     * Flag createBaseApp({...}) invocations whose readiness array is empty
     * (or contains only unconditional `{ ok: true }` entries) while the same
     * file clearly constructs external dependencies (db / ERP / circuit
     * breaker). Regex-based, so only the clearly-wrong cases are flagged.
     *
     * @param {Map<string, string>} files - filePath -> file content
     * @returns {Array<object>} warn-level findings
     */
    check: (files) => {
        const findings = [];
        // Group 2 captures the readiness array body; groups 1/3 span the
        // rest of the option object so multi-line invocations match.
        const invocationPattern = /createBaseApp\s*\(\s*\{([\s\S]*?)readiness\s*:\s*\[([\s\S]*?)\]([\s\S]*?)\}\s*\)/g;
        for (const [filePath, content] of files) {
            if (isTestFile(filePath))
                continue;
            let match;
            const re = new RegExp(invocationPattern.source, 'g');
            while ((match = re.exec(content)) !== null) {
                const trimmedBody = (match[2] ?? '').trim();
                // Empty readiness list
                const isEmpty = trimmedBody.length === 0;
                // Only unconditional-pass checks like `{ ok: true }` and no
                // real awaited probe. FIX: the previous pattern
                // `/await|await\s+\w/` had a dead second alternative and, as a
                // bare substring, also matched identifiers such as `awaited`;
                // `\bawait\b` matches only the keyword itself.
                const onlyUnconditional = !isEmpty &&
                    /ok\s*:\s*true/.test(trimmedBody) &&
                    !/\bawait\b/.test(trimmedBody);
                if (!isEmpty && !onlyUnconditional)
                    continue;
                // Does the file clearly declare downstream dependencies?
                const hasDbDep = /\bDatabase\s*\(|new\s+Database\s*\(|SqliteRepository|drizzle|sqlite/i.test(content);
                const hasErpDep = /ErpAdapter|erp[_-]?client|Ramco|OPERA|Workday|SAP\b/i.test(content);
                const hasCircuitDep = /CircuitBreaker/.test(content);
                const depCount = [hasDbDep, hasErpDep, hasCircuitDep].filter(Boolean).length;
                if (depCount < 1)
                    continue;
                findings.push({
                    ruleId: 'PGV-019',
                    filePath,
                    line: lineAt(content, match.index),
                    severity: 'warn',
                    message: `createBaseApp readiness list is ${isEmpty ? 'empty' : 'unconditional-pass-only'} but the file has ${depCount} external dependencies (db/erp/circuit). ` +
                        'Add a readiness check for each dependency so /health/ready fails when they are unavailable (ADR-PIPELINE-076).',
                });
                if (match.index === re.lastIndex)
                    re.lastIndex++;
            }
        }
        return findings;
    },
};
|
|
757
|
+
/** PGV-020: Every ERP adapter/schema file exports ERP_SCHEMA_PROVENANCE (ADR-PIPELINE-077). */
const RULE_PGV_020 = {
    id: 'PGV-020',
    title: 'ERP adapter/schema files must export ERP_SCHEMA_PROVENANCE',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-077',
    check: (files) => {
        const findings = [];
        for (const [filePath, content] of files) {
            const normalized = filePath.replace(/\\/g, '/').toLowerCase();
            // In scope: files under src/erp/, excluding tests and the
            // scaffolded schema-provenance helper itself.
            if (!normalized.includes('/erp/'))
                continue;
            if (isTestFile(filePath))
                continue;
            if (normalized.endsWith('/schema-provenance.ts'))
                continue;
            // Require the provenance block only on files that export a Zod
            // schema or define an Adapter/Client/Schema class — the code
            // paths that talk to the real ERP. Enums, constants, and other
            // utilities are skipped.
            const adapterShaped = /\bz\.object\s*\(/.test(content) ||
                /\bclass\s+\w*(?:Adapter|Client|Schema)\b/.test(content) ||
                /\bexport\s+const\s+\w+Schema\s*=/.test(content);
            if (!adapterShaped)
                continue;
            const declaresProvenance = /export\s+const\s+ERP_SCHEMA_PROVENANCE\b/.test(content);
            if (!declaresProvenance) {
                findings.push({
                    ruleId: 'PGV-020',
                    filePath,
                    line: 0,
                    severity: 'error',
                    message: 'ERP adapter/schema file does not export `ERP_SCHEMA_PROVENANCE`. ' +
                        "Import `ErpSchemaProvenance` from `src/erp/schema-provenance.ts` and export a constant with `source: 'invented'` as the default (ADR-PIPELINE-077).",
                });
            }
        }
        return findings;
    },
};
|
|
799
|
+
/** PGV-021: source='validated' requires non-null reviewer + catalog_version (ADR-PIPELINE-077). */
const RULE_PGV_021 = {
    id: 'PGV-021',
    title: "ERP_SCHEMA_PROVENANCE source='validated' requires reviewer + catalog_version",
    severity: 'error',
    adrReference: 'ADR-PIPELINE-077',
    check: (files) => {
        const findings = [];
        // Capture the (possibly multi-line) provenance object body between
        // `ERP_SCHEMA_PROVENANCE ... = {` and the closing `};`.
        const literalPattern = /export\s+const\s+ERP_SCHEMA_PROVENANCE[^=]*=\s*\{([\s\S]*?)\}\s*;/g;
        // A field value counts as present unless blank, `null`, or
        // `undefined` after trimming.
        const isProvided = (value) => value !== '' && value !== 'null' && value !== 'undefined';
        for (const [filePath, content] of files) {
            const normalized = filePath.replace(/\\/g, '/').toLowerCase();
            if (!normalized.includes('/erp/'))
                continue;
            if (isTestFile(filePath))
                continue;
            if (normalized.endsWith('/schema-provenance.ts'))
                continue;
            const scanner = new RegExp(literalPattern.source, 'g');
            let hit;
            while ((hit = scanner.exec(content)) !== null) {
                const body = hit[1] ?? '';
                // Extract source value — tolerate single / double quotes + backticks
                const sourceHit = body.match(/\bsource\s*:\s*['"`](\w+)['"`]/);
                if (sourceHit && sourceHit[1] === 'validated') {
                    // Under validated, reviewer and catalog_version must be non-null
                    const reviewerVal = (body.match(/\breviewer\s*:\s*([^,}]+)/)?.[1] ?? '').trim();
                    const catalogVal = (body.match(/\bcatalog_version\s*:\s*([^,}]+)/)?.[1] ?? '').trim();
                    const missing = [];
                    if (!isProvided(reviewerVal))
                        missing.push('reviewer');
                    if (!isProvided(catalogVal))
                        missing.push('catalog_version');
                    if (missing.length > 0) {
                        findings.push({
                            ruleId: 'PGV-021',
                            filePath,
                            line: lineAt(content, hit.index),
                            severity: 'error',
                            message: `ERP_SCHEMA_PROVENANCE has source='validated' but ${missing.join(' and ')} is null/missing. ` +
                                "Record the SME review details (reviewer name + role, catalog version) — don't rubber-stamp the validation status (ADR-PIPELINE-077).",
                        });
                    }
                }
                if (hit.index === scanner.lastIndex)
                    scanner.lastIndex += 1;
            }
        }
        return findings;
    },
};
|
|
856
|
+
/** PGV-022: Audit services must use hashAuditEntry from the scaffold (ADR-PIPELINE-078). */
const RULE_PGV_022 = {
    id: 'PGV-022',
    title: 'Audit services must hash with hashAuditEntry, not raw JSON.stringify + SHA-256',
    severity: 'error',
    adrReference: 'ADR-PIPELINE-078',
    check: (files) => {
        const findings = [];
        // The anti-pattern: createHash('sha256') followed (within 80 chars)
        // by .update(JSON.stringify(...)), in an audit-shaped file.
        const antiPattern = /createHash\s*\(\s*['"]sha256['"]\s*\)[\s\S]{0,80}\.update\s*\(\s*JSON\.stringify/g;
        for (const [filePath, content] of files) {
            if (isTestFile(filePath))
                continue;
            const normalized = filePath.replace(/\\/g, '/').toLowerCase();
            // Audit-shaped: audit*.ts paths, services/audit*.ts, or a class
            // with Audit in the name.
            const auditShaped = normalized.endsWith('/audit.ts') ||
                normalized.includes('/audit') ||
                /\bclass\s+\w*Audit\w*\b/.test(content);
            if (!auditShaped)
                continue;
            // The scaffolded helpers are the one place allowed to hash raw.
            if (normalized.endsWith('/audit-hash.ts') || normalized.endsWith('/canonical-json.ts'))
                continue;
            const scanner = new RegExp(antiPattern.source, 'g');
            let hit;
            while ((hit = scanner.exec(content)) !== null) {
                findings.push({
                    ruleId: 'PGV-022',
                    filePath,
                    line: lineAt(content, hit.index),
                    severity: 'error',
                    message: 'Raw `JSON.stringify` + `createHash("sha256")` in audit code — use ' +
                        '`hashAuditEntry` from `src/persistence/audit-hash.ts` instead. ' +
                        'JSON.stringify does not sort keys and nested payload mutations escape tamper detection (ADR-PIPELINE-078).',
                });
                if (hit.index === scanner.lastIndex)
                    scanner.lastIndex += 1;
            }
        }
        return findings;
    },
};
|
|
900
|
+
// Registry of every post-generation validation rule, run in this order by
// validateGeneratedCode. NOTE(review): ids jump from PGV-012 to PGV-016 —
// PGV-013..015 are not registered in this chunk; presumably retired or
// defined elsewhere — confirm before assuming a gap is a bug.
const ALL_RULES = [
    RULE_PGV_001, RULE_PGV_002, RULE_PGV_003, RULE_PGV_004, RULE_PGV_005,
    RULE_PGV_006, RULE_PGV_007, RULE_PGV_008, RULE_PGV_009, RULE_PGV_010,
    RULE_PGV_011, RULE_PGV_012, RULE_PGV_016, RULE_PGV_017, RULE_PGV_018, RULE_PGV_019,
    RULE_PGV_020, RULE_PGV_021, RULE_PGV_022,
];
// Exported for unit tests that want to run individual rules
export const RULES = ALL_RULES;
|
|
908
|
+
// ============================================================================
// Main Validator
// ============================================================================
/**
 * Run every rule in ALL_RULES against the files under a project directory.
 * Pure apart from the file reads done by loadProjectFiles.
 *
 * @param {string} projectDir - root of the generated project to scan
 * @returns {object} report: { passed, score, findings, filesScanned,
 *          rulesApplied, errorCount, warnCount, projectDir }
 */
export function validateGeneratedCode(projectDir) {
    const files = loadProjectFiles(projectDir);
    const findings = [];
    for (const rule of ALL_RULES) {
        try {
            findings.push(...rule.check(files));
        }
        catch (err) {
            // A rule that throws must not sink the whole run — record the
            // failure as a warn-level finding and continue with the rest.
            const errMsg = err instanceof Error ? err.message : String(err);
            findings.push({
                ruleId: rule.id,
                filePath: '<rule-error>',
                line: 0,
                severity: 'warn',
                message: `Rule ${rule.id} threw during execution: ${errMsg}. Skipping.`,
            });
        }
    }
    let errorCount = 0;
    let warnCount = 0;
    for (const finding of findings) {
        if (finding.severity === 'error')
            errorCount += 1;
        else if (finding.severity === 'warn')
            warnCount += 1;
    }
    // Scoring: 100 - (10 * errors, capped at 70) - (2 * warns, capped at 20)
    const score = Math.max(0, 100 - Math.min(70, errorCount * 10) - Math.min(20, warnCount * 2));
    return {
        passed: score >= 70,
        score,
        findings,
        filesScanned: files.size,
        rulesApplied: ALL_RULES.length,
        errorCount,
        warnCount,
        projectDir,
    };
}
|
|
951
|
+
// ============================================================================
// Stage Entry Point
// ============================================================================
/**
 * Resolve the generated project directory under the Phase 5 output tree.
 * Prefers deploy/project, falls back to codegen/project.
 *
 * @param {string} phase5Dir - Phase 5 output root
 * @returns {string | null} the first existing candidate path, or null
 */
function resolveProjectDir(phase5Dir) {
    const candidates = [
        path.join(phase5Dir, 'deploy', 'project'),
        path.join(phase5Dir, 'codegen', 'project'),
    ];
    return candidates.find((candidate) => fs.existsSync(candidate)) ?? null;
}
|
|
969
|
+
/**
 * Execute the post-generation code validator as a Phase 5 pipeline stage.
 *
 * Non-blocking by default. When AGENTICS_STRICT_POSTGEN=true, any error-level
 * finding fails the stage. The coordinator decides whether stage failure
 * blocks the pipeline.
 *
 * Side effects: appends telemetry spans to context.telemetrySpans, writes
 * post-generation-report.json under context.phase5Dir (best-effort), and
 * logs a findings summary to stderr.
 *
 * @param {{ traceId: string, phase5Dir: string, telemetrySpans: Array<object> }} context
 *        Pipeline stage context (telemetrySpans is mutated).
 * @returns {{ stageOutput: object }} stage result: status, timing (ms),
 *          artifacts, human-readable summary, and the full report in `data`.
 */
export function executePostGenerationValidator(context) {
    const startTime = Date.now();
    // One telemetry span covers the entire validation stage.
    const span = createSpan('post-generation-validation', 'validate-generated-code', {
        traceId: context.traceId,
        phase5Dir: context.phase5Dir,
    });
    const projectDir = resolveProjectDir(context.phase5Dir);
    if (!projectDir) {
        // No generated project to scan — fail the stage, emitting an
        // empty-but-shaped report so downstream consumers stay uniform.
        const elapsed = Date.now() - startTime;
        const failSpan = endSpan(span, 'error', { reason: 'project-not-found' });
        emitSpan(failSpan);
        context.telemetrySpans.push(failSpan);
        return {
            stageOutput: {
                stage: 'post-generation-validation',
                status: 'failed',
                timing: elapsed,
                artifacts: [],
                summary: 'ECLI-P5-041: No generated project directory found (tried deploy/project and codegen/project).',
                data: {
                    passed: false,
                    score: 0,
                    findings: [],
                    filesScanned: 0,
                    rulesApplied: ALL_RULES.length,
                    errorCount: 0,
                    warnCount: 0,
                    projectDir: null,
                },
            },
        };
    }
    const report = validateGeneratedCode(projectDir);
    const elapsed = Date.now() - startTime;
    // Strict mode promotes error findings to a stage failure.
    const strict = process.env['AGENTICS_STRICT_POSTGEN'] === 'true';
    const failOnStrict = strict && report.errorCount > 0;
    // Persist a report artifact so reviewers can inspect findings without rerunning
    const reportPath = path.join(context.phase5Dir, 'post-generation-report.json');
    try {
        // mode 0o600: owner-only read/write on the written report file.
        fs.writeFileSync(reportPath, JSON.stringify(report, null, 2), { encoding: 'utf-8', mode: 0o600 });
    }
    catch {
        // best-effort
    }
    // Log findings to stderr using the same pattern as implementation-quality-gate.ts
    if (report.findings.length > 0) {
        console.error(` [POSTGEN] Post-generation validation: ${report.errorCount} errors, ${report.warnCount} warnings (score: ${report.score}/100)`);
        // Group findings by rule for readability
        const byRule = new Map();
        for (const f of report.findings) {
            const list = byRule.get(f.ruleId) ?? [];
            list.push(f);
            byRule.set(f.ruleId, list);
        }
        for (const [ruleId, findings] of byRule) {
            // One headline per rule (first finding's message), then at most
            // five file locations to keep stderr output readable.
            const first = findings[0];
            const severityTag = first.severity === 'error' ? 'ERROR' : 'warn';
            console.error(`   [${severityTag}] ${ruleId} — ${findings.length} finding${findings.length > 1 ? 's' : ''}: ${first.message}`);
            for (const f of findings.slice(0, 5)) {
                console.error(`     ${f.filePath}${f.line > 0 ? `:${f.line}` : ''}`);
            }
            if (findings.length > 5) {
                console.error(`     ... and ${findings.length - 5} more (see post-generation-report.json)`);
            }
        }
    }
    else {
        console.error(` [POSTGEN] Post-generation validation: PASSED (score: ${report.score}/100, ${report.filesScanned} files, ${ALL_RULES.length} rules).`);
    }
    const finalSpan = endSpan(span, failOnStrict ? 'error' : 'ok', {
        errorCount: report.errorCount,
        warnCount: report.warnCount,
        score: report.score,
        strict,
    });
    emitSpan(finalSpan);
    context.telemetrySpans.push(finalSpan);
    // Stage fails only in strict mode with error findings; otherwise the
    // validator is advisory and the stage completes.
    const status = failOnStrict ? 'failed' : 'completed';
    const summary = failOnStrict
        ? `ECLI-P5-040: Post-generation validation FAILED in strict mode — ${report.errorCount} errors, score ${report.score}/100.`
        : ` Post-generation validation: ${report.errorCount} errors, ${report.warnCount} warnings, score ${report.score}/100 (${report.filesScanned} files scanned, ${ALL_RULES.length} rules).`;
    return {
        stageOutput: {
            stage: 'post-generation-validation',
            status,
            timing: elapsed,
            artifacts: [reportPath],
            summary,
            data: report,
        },
    };
}
|
|
1068
|
+
//# sourceMappingURL=post-generation-validator.js.map
|