@unifyplane/logsdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/copilot-instructions.md +48 -0
- package/README.md +8 -0
- package/contracts/specs/LogSDKFuntionalSpec.md +394 -0
- package/contracts/specs/fanout-semantics.v1.md +244 -0
- package/contracts/specs/sink-contract.v1.md +223 -0
- package/contracts/specs/step-record.v1.md +292 -0
- package/contracts/specs/validation-rules.v1.md +324 -0
- package/docs/LogSDK-Unified-Execution-Logging-Framework.md +93 -0
- package/docs/log_sdk_test_cases_traceability_plan.md +197 -0
- package/docs/log_sdk_test_coverage_report.md +198 -0
- package/docs/prompts/AuditorSDK.txt +214 -0
- package/package.json +29 -0
- package/src/core/clock.ts +25 -0
- package/src/core/context.ts +142 -0
- package/src/core/fanout.ts +38 -0
- package/src/core/ids.ts +35 -0
- package/src/core/message_constraints.ts +66 -0
- package/src/core/outcomes.ts +5 -0
- package/src/core/record_builder.ts +269 -0
- package/src/core/spool.ts +41 -0
- package/src/core/types.ts +56 -0
- package/src/crypto-shim.d.ts +9 -0
- package/src/fs-shim.d.ts +15 -0
- package/src/index.ts +107 -0
- package/src/node-test-shim.d.ts +1 -0
- package/src/perf_hooks-shim.d.ts +7 -0
- package/src/process-shim.d.ts +1 -0
- package/src/sinks/file_ndjson.ts +42 -0
- package/src/sinks/file_ndjson_sink.ts +45 -0
- package/src/sinks/sink_types.ts +15 -0
- package/src/sinks/stdout_sink.ts +20 -0
- package/src/validate/api_surface_guard.ts +106 -0
- package/src/validate/noncompliance.ts +33 -0
- package/src/validate/schema_guard.ts +238 -0
- package/tests/fanout.test.ts +51 -0
- package/tests/fanout_spool.test.ts +96 -0
- package/tests/message_constraints.test.ts +7 -0
- package/tests/node-shim.d.ts +1 -0
- package/tests/record_builder.test.ts +32 -0
- package/tests/sequence_monotonic.test.ts +62 -0
- package/tests/sinks_file_ndjson.test.ts +53 -0
- package/tests/step1_compliance.test.ts +192 -0
- package/tools/test_results/generate-test-traceability.js +60 -0
- package/tools/test_results/normalize-test-results.js +57 -0
- package/tools/test_results/run-tests-then-prebuild.js +103 -0
- package/tools/test_results/test-case-map.json +9 -0
- package/tsconfig.json +31 -0
- package/validators/bootstrap/validate-repo-structure.ts +590 -0
|
@@ -0,0 +1,590 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* validators/validate-repo-structure.ts
|
|
3
|
+
*
|
|
4
|
+
* UnifyPlane — Repo Structure Validator (STRICT)
|
|
5
|
+
*
|
|
6
|
+
* Enforces REQUIRED repo-root layout:
|
|
7
|
+
* contracts/, validators/, semantics/, tools/, src/, tests/, README.md
|
|
8
|
+
*
|
|
9
|
+
* Enforces contracts/ substructure (only):
|
|
10
|
+
* specs/, schemas/, registries/, constraints/, abi/, authority/
|
|
11
|
+
*
|
|
12
|
+
* Enforces structural separation:
|
|
13
|
+
* - No validators/semantics/tools/evidence under contracts/
|
|
14
|
+
* - No evidence/ directory anywhere in repo (evidence is runtime-only, outside Git)
|
|
15
|
+
* - No runtime log artifacts in Git (e.g., *.ndjson, *.log)
|
|
16
|
+
*
|
|
17
|
+
* Enforces runtime boundary:
|
|
18
|
+
* - src/ contains runtime execution only
|
|
19
|
+
* - No generators, audits, or repo-inspection logic under src/
|
|
20
|
+
*
|
|
21
|
+
* Enforces system-type constraint:
|
|
22
|
+
* - contracts/authority/ must be empty or absent for pipeline/content systems
|
|
23
|
+
* - contracts/authority/ allowed for process systems only
|
|
24
|
+
*
|
|
25
|
+
* Output:
|
|
26
|
+
* evidence/validators/validate-repo-structure/repo-structure-audit.json
|
|
27
|
+
*
|
|
28
|
+
* Usage:
|
|
29
|
+
* npx tsx validators/validate-repo-structure.ts --root <repo-root>
|
|
30
|
+
* (Optional override) --systemType process|pipeline|content
|
|
31
|
+
*
|
|
32
|
+
* NOTE: --root is REQUIRED (no ambient CWD fallback).
|
|
33
|
+
*/
|
|
34
|
+
|
|
35
|
+
import * as fs from "node:fs/promises";
|
|
36
|
+
import * as path from "node:path";
|
|
37
|
+
|
|
38
|
+
/** System classification; drives the contracts/authority/ constraint in main(). */
type SystemType = "process" | "pipeline" | "content";

/** Finding severity; any "high" finding renders the audit NON_COMPLIANT. */
type Severity = "high" | "medium" | "low" | "info";

/** One structural violation (or informational note) discovered by the audit. */
type Finding = {
  severity: Severity;
  // Stable, machine-matchable rule identifier (e.g. "MISSING_REQUIRED_FILE").
  rule_id: string;
  // Human-readable explanation of the finding.
  message: string;
  // Repo-root-relative path the finding refers to, when applicable.
  path?: string;
  // What strict mode expected to see.
  expected?: string;
  // What was actually observed, when it adds detail beyond `message`.
  observed?: string;
};

/**
 * Full JSON document written to
 * evidence/validators/validate-repo-structure/repo-structure-audit.json.
 */
type AuditReport = {
  audit_metadata: {
    validator: "validate-repo-structure";
    mode: "strict";
    repo_root: string;
    repo_name: string;
    system_type: SystemType;
    audited_at: string;
    validator_file: string;
  };
  summary: {
    overall_status: "COMPLIANT" | "NON_COMPLIANT";
    risk_level: "LOW" | "MEDIUM" | "HIGH";
    total_findings: number;
    by_severity: Record<Severity, number>;
    missing_required: string[];
  };
  findings: Finding[];
  folder_inventory: {
    top_level: Record<string, boolean>;
    contracts: Record<string, { exists: boolean; file_count: number; dir_count: number }>;
  };
  notes: string[];
  output_path: string;
};
|
|
76
|
+
|
|
77
|
+
// Folders that MUST exist at repo root in strict mode.
const REQUIRED_TOP_LEVEL = [
  "contracts",
  "validators",
  "semantics",
  "tools",
  "src",
  "tests",
] as const;

// Files that MUST exist at repo root in strict mode.
const REQUIRED_FILES = ["README.md"] as const;

// Explicitly ignored repo-root helper artifacts (non-authoritative, diagnostic only)
// NOTE: These are allowed ONLY at repo root and MUST NOT expand without governance review.
const IGNORED_TOP_LEVEL_FILES = new Set([
  "filestructure.output.txt",
]);

// The only subfolders permitted directly under contracts/.
const CONTRACTS_ALLOWED_SUBFOLDERS = [
  "specs",
  "schemas",
  "registries",
  "constraints",
  "abi",
  "authority",
] as const;

// Names that trigger a dedicated "forbidden" rule under contracts/, on top of
// the allow-list above.
const CONTRACTS_FORBIDDEN_SUBFOLDERS = [
  "validators",
  "semantics",
  "tools",
  "evidence",
] as const;

// Directories skipped entirely by the repo walk (VCS metadata, build output,
// editor config, package caches).
const DEFAULT_IGNORE_DIRS = new Set([
  ".git",
  ".github",
  ".vscode",
  "node_modules",
  "dist",
  "build",
  "out",
  "coverage",
  ".next",
  ".turbo",
  ".cache",
  ".pnpm-store",
]);

// Runtime evidence artifacts that must not live in Git
const FORBIDDEN_FILE_EXTS = new Set([
  ".ndjson",
  ".log",
  ".trace",
  ".spool",
  ".loki",
]);

// Well-known runtime log filenames, forbidden by name (compared lowercase).
const FORBIDDEN_FILE_NAMES = new Set([
  "loki.ndjson",
  "trace.ndjson",
  "logs.ndjson",
]);

// Allowed evidence location (tool outputs only)
const ALLOWED_EVIDENCE_PREFIX = "evidence/validators/";
|
|
142
|
+
|
|
143
|
+
function nowIso(): string {
|
|
144
|
+
return new Date().toISOString();
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
function parseArgs(argv: string[]) {
|
|
148
|
+
const args: Record<string, string | boolean> = {};
|
|
149
|
+
for (let i = 0; i < argv.length; i++) {
|
|
150
|
+
const a = argv[i];
|
|
151
|
+
if (a === "--root") args.root = argv[++i] ?? "";
|
|
152
|
+
else if (a === "--systemType") args.systemType = argv[++i] ?? "";
|
|
153
|
+
else if (a === "--help") args.help = true;
|
|
154
|
+
}
|
|
155
|
+
return args;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
async function exists(p: string): Promise<boolean> {
|
|
159
|
+
try {
|
|
160
|
+
await fs.access(p);
|
|
161
|
+
return true;
|
|
162
|
+
} catch {
|
|
163
|
+
return false;
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
async function statSafe(p: string) {
|
|
168
|
+
try {
|
|
169
|
+
return await fs.stat(p);
|
|
170
|
+
} catch {
|
|
171
|
+
return null;
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
async function listDirSafe(p: string): Promise<string[]> {
|
|
176
|
+
try {
|
|
177
|
+
return await fs.readdir(p);
|
|
178
|
+
} catch {
|
|
179
|
+
return [];
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
function classifyRisk(findings: Finding[]): "LOW" | "MEDIUM" | "HIGH" {
|
|
184
|
+
if (findings.some((f) => f.severity === "high")) return "HIGH";
|
|
185
|
+
if (findings.some((f) => f.severity === "medium")) return "MEDIUM";
|
|
186
|
+
return "LOW";
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
function countBySeverity(findings: Finding[]): Record<Severity, number> {
|
|
190
|
+
return findings.reduce(
|
|
191
|
+
(acc, f) => {
|
|
192
|
+
acc[f.severity] += 1;
|
|
193
|
+
return acc;
|
|
194
|
+
},
|
|
195
|
+
{ high: 0, medium: 0, low: 0, info: 0 } as Record<Severity, number>,
|
|
196
|
+
);
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
/**
|
|
200
|
+
* Extract system type from README.md (STRICT expectation).
|
|
201
|
+
* Accepted patterns (case-insensitive):
|
|
202
|
+
* - "System Type: process"
|
|
203
|
+
* - "system_type: pipeline"
|
|
204
|
+
* - "system-type = content"
|
|
205
|
+
*/
|
|
206
|
+
function extractSystemTypeFromReadme(readme: string): SystemType | null {
|
|
207
|
+
const re = /system[\s_-]*type\s*[:=]\s*(process|pipeline|content)\b/i;
|
|
208
|
+
const m = readme.match(re);
|
|
209
|
+
if (!m) return null;
|
|
210
|
+
const v = m[1].toLowerCase();
|
|
211
|
+
if (v === "process" || v === "pipeline" || v === "content") return v;
|
|
212
|
+
return null;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
async function walkRepo(
|
|
216
|
+
root: string,
|
|
217
|
+
onEntry: (absPath: string, relPath: string, st: { isDir: boolean; isFile: boolean }) => Promise<void>,
|
|
218
|
+
ignoreDirs: Set<string>,
|
|
219
|
+
) {
|
|
220
|
+
async function walkDir(dirAbs: string, dirRel: string) {
|
|
221
|
+
const entries = await fs.readdir(dirAbs, { withFileTypes: true });
|
|
222
|
+
for (const ent of entries) {
|
|
223
|
+
const abs = path.join(dirAbs, ent.name);
|
|
224
|
+
const rel = dirRel ? path.posix.join(dirRel, ent.name) : ent.name;
|
|
225
|
+
|
|
226
|
+
if (ent.isDirectory()) {
|
|
227
|
+
if (ignoreDirs.has(ent.name)) continue;
|
|
228
|
+
await onEntry(abs, rel, { isDir: true, isFile: false });
|
|
229
|
+
await walkDir(abs, rel);
|
|
230
|
+
} else if (ent.isFile()) {
|
|
231
|
+
await onEntry(abs, rel, { isDir: false, isFile: true });
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
await walkDir(root, "");
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
/**
 * Entry point. Parses CLI flags, runs every structural check against --root,
 * writes the JSON audit report under evidence/validators/, prints a one-line
 * summary, and exits 0 (COMPLIANT) or 1 (NON_COMPLIANT); exit 2 is reserved
 * for usage errors (missing --root) and crashes (see the bootstrap below).
 */
async function main() {
  const args = parseArgs(process.argv.slice(2));
  // --help short-circuits everything with usage text and exit 0.
  if (args.help) {
    console.log(
      [
        "validate-repo-structure (STRICT)",
        "Usage:",
        "  npx tsx validators/validate-repo-structure.ts --root <repo-root> [--systemType process|pipeline|content]",
        "",
        "Notes:",
        "  --root is REQUIRED. No ambient process.cwd() fallback.",
      ].join("\n"),
    );
    process.exit(0);
  }

  // --root is mandatory by design: no fallback to the current working directory.
  const rootArg = typeof args.root === "string" ? args.root.trim() : "";
  if (!rootArg) {
    console.error("ERROR: --root is required (no ambient CWD fallback).");
    process.exit(2);
  }

  const repoRoot = path.resolve(rootArg);
  const repoName = path.basename(repoRoot);

  const findings: Finding[] = [];
  const notes: string[] = [];

  // --- Required presence checks
  const topLevelInventory: Record<string, boolean> = {};
  const missingRequired: string[] = [];

  for (const folder of REQUIRED_TOP_LEVEL) {
    const p = path.join(repoRoot, folder);
    const ok = await exists(p);
    topLevelInventory[folder] = ok;
    if (!ok) {
      missingRequired.push(`${folder}/`);
      findings.push({
        severity: "high",
        rule_id: "MISSING_REQUIRED_TOP_LEVEL",
        message: `Missing required top-level folder: ${folder}/`,
        path: `${folder}/`,
        expected: "Folder must exist at repo root (strict mode).",
      });
    }
  }

  for (const file of REQUIRED_FILES) {
    const p = path.join(repoRoot, file);
    const ok = await exists(p);
    topLevelInventory[file] = ok;
    if (!ok) {
      missingRequired.push(file);
      findings.push({
        severity: "high",
        rule_id: "MISSING_REQUIRED_FILE",
        message: `Missing required repo-root file: ${file}`,
        path: file,
        expected: "File must exist at repo root (strict mode).",
      });
    }
  }

  // --- Determine system type (from override OR README)
  // Precedence: a valid --systemType override wins; otherwise the README
  // declaration; otherwise "pipeline" is used as a placeholder (with a
  // high-severity finding, so the run stays NON_COMPLIANT).
  let systemType: SystemType | null = null;

  const override = typeof args.systemType === "string" ? args.systemType.trim().toLowerCase() : "";
  if (override) {
    if (override === "process" || override === "pipeline" || override === "content") {
      systemType = override;
      notes.push("System type provided via --systemType override.");
    } else {
      findings.push({
        severity: "high",
        rule_id: "INVALID_SYSTEM_TYPE_OVERRIDE",
        message: `Invalid --systemType value: ${override}`,
        expected: "process | pipeline | content",
      });
    }
  }

  if (!systemType && (await exists(path.join(repoRoot, "README.md")))) {
    const readme = await fs.readFile(path.join(repoRoot, "README.md"), "utf8");
    const parsed = extractSystemTypeFromReadme(readme);
    if (!parsed) {
      findings.push({
        severity: "high",
        rule_id: "README_MISSING_SYSTEM_TYPE",
        message: "README.md does not declare system type.",
        path: "README.md",
        expected: 'Include a line like "System Type: process|pipeline|content" (strict mode).',
      });
    } else {
      systemType = parsed;
    }
  }

  if (!systemType) {
    // Default to pipeline for report completeness, but keep non-compliance
    systemType = "pipeline";
    findings.push({
      severity: "high",
      rule_id: "SYSTEM_TYPE_UNRESOLVED",
      message: "System type could not be resolved; using pipeline as placeholder for report.",
      expected: "Declare system type in README or pass --systemType.",
    });
  }

  // --- contracts/ structural checks
  const contractsPath = path.join(repoRoot, "contracts");
  // Pre-seed the inventory so every canonical subfolder appears in the report
  // even when contracts/ is missing or the subfolder is absent.
  const contractsInventory: Record<string, { exists: boolean; file_count: number; dir_count: number }> =
    Object.fromEntries(
      CONTRACTS_ALLOWED_SUBFOLDERS.map((k) => [k, { exists: false, file_count: 0, dir_count: 0 }]),
    );

  if (await exists(contractsPath)) {
    const entries = await fs.readdir(contractsPath, { withFileTypes: true });

    // Disallow any unknown subfolder under contracts/
    for (const ent of entries) {
      if (!ent.isDirectory()) continue;
      const name = ent.name;

      const isAllowed = (CONTRACTS_ALLOWED_SUBFOLDERS as readonly string[]).includes(name);
      if (!isAllowed) {
        findings.push({
          severity: "high",
          rule_id: "NON_CANONICAL_CONTRACTS_SUBFOLDER",
          message: `Non-canonical subfolder under contracts/: ${name}/`,
          path: `contracts/${name}/`,
          expected: `Only allowed: ${CONTRACTS_ALLOWED_SUBFOLDERS.join(", ")}`,
        });
      }
    }

    // Explicitly forbidden names under contracts/
    // (These also trip the allow-list check above; this adds a dedicated rule_id.)
    for (const forbidden of CONTRACTS_FORBIDDEN_SUBFOLDERS) {
      const p = path.join(contractsPath, forbidden);
      if (await exists(p)) {
        findings.push({
          severity: "high",
          rule_id: "FORBIDDEN_FOLDER_UNDER_CONTRACTS",
          message: `Forbidden folder found under contracts/: ${forbidden}/`,
          path: `contracts/${forbidden}/`,
          expected: "Non-authoritative artifacts must not live under contracts/.",
        });
      }
    }

    // Inventory allowed subfolders and count content
    for (const sub of CONTRACTS_ALLOWED_SUBFOLDERS) {
      const subPath = path.join(contractsPath, sub);
      const subExists = await exists(subPath);
      if (!subExists) {
        // In strict mode: we require top-level folders, not necessarily every contracts/* subfolder.
        // BUT we still enforce that if contracts/ exists, it should contain only canonical subfolders.
        // Missing subfolders are INFO (not required).
        findings.push({
          severity: "info",
          rule_id: "MISSING_CONTRACTS_SUBFOLDER",
          message: `contracts/${sub}/ is absent (allowed).`,
          path: `contracts/${sub}/`,
          expected: "Optional within contracts/, but recommended for structural completeness.",
        });
        continue;
      }

      const subEntries = await fs.readdir(subPath, { withFileTypes: true });
      const fileCount = subEntries.filter((e) => e.isFile()).length;
      const dirCount = subEntries.filter((e) => e.isDirectory()).length;

      contractsInventory[sub] = { exists: true, file_count: fileCount, dir_count: dirCount };
    }

    // Authority folder system-type constraint
    const authorityPath = path.join(contractsPath, "authority");
    if (await exists(authorityPath)) {
      const authEntries = await fs.readdir(authorityPath, { withFileTypes: true });
      const authFileCount = authEntries.filter((e) => e.isFile()).length;
      const hasAuthMaterial = authFileCount > 0 || authEntries.some((e) => e.isDirectory());

      // An empty authority/ folder is tolerated; only actual content violates.
      if ((systemType === "pipeline" || systemType === "content") && hasAuthMaterial) {
        findings.push({
          severity: "high",
          rule_id: "AUTHORITY_IN_NON_PROCESS_SYSTEM",
          message: `Non-process system (${systemType}) contains authority declarations.`,
          path: "contracts/authority/",
          expected: "Pipeline/content systems must not contain authority declarations.",
          observed: `Found ${authEntries.length} entries.`,
        });
      }
    }
  } else {
    // contracts/ is required by strict top-level check. If missing, already high severity.
    notes.push("contracts/ missing or inaccessible; contracts substructure checks skipped.");
  }

  // --- Forbidden evidence directory anywhere + forbidden file artifacts anywhere
  // NOTE(review): forbiddenEvidenceDirs is never populated — the walk callback
  // below deliberately no-ops on evidence/ directories — so the
  // EVIDENCE_FOLDER_IN_GIT loop further down can never emit a finding.
  // Confirm whether that rule should be removed or actually wired up.
  const forbiddenEvidenceDirs: string[] = [];
  const forbiddenArtifacts: string[] = [];

  await walkRepo(
    repoRoot,
    async (abs, rel, st) => {
      const base = path.basename(rel);

      // evidence/ directory presence is allowed for validator outputs under evidence/validators/
      // Other disallowed artifacts are still caught via file extension rules.
      if (st.isDir && base.toLowerCase() === "evidence") {
        // no-op: evidence folder itself is permitted as a container for validator outputs
      }

      if (st.isFile) {
        const ext = path.extname(base).toLowerCase();
        const lower = base.toLowerCase();

        if (FORBIDDEN_FILE_NAMES.has(lower) || FORBIDDEN_FILE_EXTS.has(ext)) {
          // allow validator evidence JSON files under the allowed prefix
          const isAllowedValidatorEvidence =
            rel.startsWith(ALLOWED_EVIDENCE_PREFIX) && ext === ".json";

          if (!isAllowedValidatorEvidence) {
            forbiddenArtifacts.push(rel);
          }
        }
      }
    },
    DEFAULT_IGNORE_DIRS,
  );

  // Dead loop today — see NOTE(review) above forbiddenEvidenceDirs.
  for (const d of forbiddenEvidenceDirs) {
    findings.push({
      severity: "high",
      rule_id: "EVIDENCE_FOLDER_IN_GIT",
      message: "evidence/ folder present in repo (evidence must not be stored in Git).",
      path: d,
      expected: "Remove evidence/ from repo; store runtime evidence outside Git.",
    });
  }

  for (const f of forbiddenArtifacts) {
    findings.push({
      severity: "high",
      rule_id: "RUNTIME_EVIDENCE_ARTIFACT_IN_GIT",
      message: "Runtime evidence/log artifact detected in repo.",
      path: f,
      expected: "Do not commit runtime logs/traces/ndjson/spool artifacts.",
    });
  }

  // --- Extra top-level folders (reported as medium)
  const topEntries = await fs.readdir(repoRoot, { withFileTypes: true });
  const allowedTopNames = new Set<string>([
    ...REQUIRED_TOP_LEVEL,
    ...REQUIRED_FILES,
    ...Array.from(DEFAULT_IGNORE_DIRS),
    // Common meta
    "evidence", // allowed as container for validator outputs only
    "docs",
    "scripts",
    ".editorconfig",
    ".gitignore",
    ".gitattributes",
    "package.json",
    "pnpm-lock.yaml",
    "package-lock.json",
    "yarn.lock",
    "tsconfig.json",
    "eslint.config.js",
    ".eslintrc",
    ".prettierrc",
    "prettier.config.js",
    "LICENSE",
  ]);

  for (const ent of topEntries) {
    const name = ent.name;
    // Explicit ignore for known non-authoritative helper files
    if (IGNORED_TOP_LEVEL_FILES.has(name)) continue;
    if (allowedTopNames.has(name)) continue;

    // Treat unknown top-level as medium (structure drift)
    findings.push({
      severity: "medium",
      rule_id: "UNKNOWN_TOP_LEVEL_ENTRY",
      message: "Unknown / non-canonical top-level entry found.",
      path: name,
      expected: `Keep repo-root minimal; canonical folders are: ${[
        ...REQUIRED_TOP_LEVEL,
        ...REQUIRED_FILES,
      ].join(", ")}`,
      observed: name,
    });
  }

  // --- Build report
  const bySeverity = countBySeverity(findings);
  const risk = classifyRisk(findings);
  // Only "high" findings flip the overall verdict; medium/low/info do not.
  const overall =
    findings.some((f) => f.severity === "high") ? "NON_COMPLIANT" : "COMPLIANT";

  const report: AuditReport = {
    audit_metadata: {
      validator: "validate-repo-structure",
      mode: "strict",
      repo_root: repoRoot,
      repo_name: repoName,
      system_type: systemType,
      audited_at: nowIso(),
      validator_file: "validators/validate-repo-structure.ts",
    },
    summary: {
      overall_status: overall,
      risk_level: risk,
      total_findings: findings.length,
      by_severity: bySeverity,
      missing_required: missingRequired,
    },
    findings,
    folder_inventory: {
      top_level: topLevelInventory,
      contracts: contractsInventory,
    },
    notes,
    output_path: "evidence/validators/validate-repo-structure/repo-structure-audit.json",
  };

  // --- Ensure evidence output folder exists and write JSON
  const outDir = path.join(
    repoRoot,
    "evidence",
    "validators",
    "validate-repo-structure",
  );
  await fs.mkdir(outDir, { recursive: true });
  const outPath = path.join(outDir, "repo-structure-audit.json");
  await fs.writeFile(outPath, JSON.stringify(report, null, 2) + "\n", "utf8");

  // --- Console summary (minimal)
  console.log(`[validate-repo-structure][strict] ${overall} (${risk})`);
  console.log(`Report: ${path.relative(repoRoot, outPath)}`);

  // Exit code: 0 compliant, 1 non-compliant, 2 usage/config error
  process.exit(overall === "COMPLIANT" ? 0 : 1);
}
|
|
585
|
+
|
|
586
|
+
// Top-level bootstrap: any uncaught error is reported and mapped to exit code 2
// (reserved for usage/config/crash conditions, distinct from 1 = non-compliant).
main().catch((err) => {
  console.error("FATAL: validator crashed");
  console.error(err instanceof Error ? err.stack ?? err.message : String(err));
  process.exit(2);
});
|