@unifyplane/logsdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/copilot-instructions.md +48 -0
- package/README.md +8 -0
- package/contracts/specs/LogSDKFuntionalSpec.md +394 -0
- package/contracts/specs/fanout-semantics.v1.md +244 -0
- package/contracts/specs/sink-contract.v1.md +223 -0
- package/contracts/specs/step-record.v1.md +292 -0
- package/contracts/specs/validation-rules.v1.md +324 -0
- package/docs/LogSDK-Unified-Execution-Logging-Framework.md +93 -0
- package/docs/log_sdk_test_cases_traceability_plan.md +197 -0
- package/docs/log_sdk_test_coverage_report.md +198 -0
- package/docs/prompts/AuditorSDK.txt +214 -0
- package/package.json +29 -0
- package/src/core/clock.ts +25 -0
- package/src/core/context.ts +142 -0
- package/src/core/fanout.ts +38 -0
- package/src/core/ids.ts +35 -0
- package/src/core/message_constraints.ts +66 -0
- package/src/core/outcomes.ts +5 -0
- package/src/core/record_builder.ts +269 -0
- package/src/core/spool.ts +41 -0
- package/src/core/types.ts +56 -0
- package/src/crypto-shim.d.ts +9 -0
- package/src/fs-shim.d.ts +15 -0
- package/src/index.ts +107 -0
- package/src/node-test-shim.d.ts +1 -0
- package/src/perf_hooks-shim.d.ts +7 -0
- package/src/process-shim.d.ts +1 -0
- package/src/sinks/file_ndjson.ts +42 -0
- package/src/sinks/file_ndjson_sink.ts +45 -0
- package/src/sinks/sink_types.ts +15 -0
- package/src/sinks/stdout_sink.ts +20 -0
- package/src/validate/api_surface_guard.ts +106 -0
- package/src/validate/noncompliance.ts +33 -0
- package/src/validate/schema_guard.ts +238 -0
- package/tests/fanout.test.ts +51 -0
- package/tests/fanout_spool.test.ts +96 -0
- package/tests/message_constraints.test.ts +7 -0
- package/tests/node-shim.d.ts +1 -0
- package/tests/record_builder.test.ts +32 -0
- package/tests/sequence_monotonic.test.ts +62 -0
- package/tests/sinks_file_ndjson.test.ts +53 -0
- package/tests/step1_compliance.test.ts +192 -0
- package/tools/test_results/generate-test-traceability.js +60 -0
- package/tools/test_results/normalize-test-results.js +57 -0
- package/tools/test_results/run-tests-then-prebuild.js +103 -0
- package/tools/test_results/test-case-map.json +9 -0
- package/tsconfig.json +31 -0
- package/validators/bootstrap/validate-repo-structure.ts +590 -0
package/tests/sinks_file_ndjson.test.ts
ADDED
@@ -0,0 +1,53 @@
import { test, expect } from "vitest";
import path from "node:path";
import os from "node:os";
import fs from "node:fs/promises";
import type { StepRecord } from "../src/core/types";
import { assertStepRecord } from "../src/validate/schema_guard";

class FileNdjsonSink {
  constructor(private readonly filePath: string) {}

  async emit(record: Readonly<StepRecord>): Promise<void> {
    assertStepRecord(record);
    const line = `${JSON.stringify(record)}\n`;
    await fs.appendFile(this.filePath, line, "utf8");
  }
}

function buildRecord(recordId: string): StepRecord {
  return Object.freeze({
    record_version: "log.step.v1",
    record_id: recordId,
    sequence: 1,
    timestamp_utc: new Date().toISOString(),
    monotonic_time: Date.now(),
    institution: "UnifyPlane",
    system_name: "LogSDK",
    system_type: "service",
    environment: "test",
    system_version: "1.0.0",
    instance_id: "unit-test",
    message: "NDJSON check",
    context_hash: "context_hash_001",
    context_version: "log.context.v1",
    record_hash: "hash_001",
    hash_algorithm: "sha256",
  });
}

test("file sink writes NDJSON", async () => {
  const filePath = path.join(os.tmpdir(), `logsdk-${Date.now()}.ndjson`);
  const sink = new FileNdjsonSink(filePath);

  await sink.emit(buildRecord("rec_ndjson_001"));
  await sink.emit(buildRecord("rec_ndjson_002"));

  const content = await fs.readFile(filePath, "utf8");
  const lines = content.split(/\r?\n/).filter(Boolean);

  expect(lines.length).toBe(2);
  const parsed = lines.map((line: string) => JSON.parse(line) as StepRecord);
  expect(parsed[0].record_id).toBe("rec_ndjson_001");
  expect(parsed[1].record_id).toBe("rec_ndjson_002");
});
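The test above defines its own inline FileNdjsonSink rather than importing the packaged one from src/sinks/file_ndjson_sink.ts. For orientation, a minimal sketch of the emit contract it exercises follows; the interface name is an assumption chosen for illustration and is not copied from src/sinks/sink_types.ts, which is not shown in this excerpt.

// Hedged sketch: the emit contract exercised by the test above. "StepSink" and
// "MemorySink" are assumed names; the real declarations live in src/sinks/sink_types.ts
// and src/core/types.ts, neither of which appears in this excerpt.
import type { StepRecord } from "../src/core/types";

interface StepSink {
  // Persist one already-validated StepRecord (the file sink appends it as a single NDJSON line).
  emit(record: Readonly<StepRecord>): Promise<void> | void;
}

// Example: an in-memory sink satisfying the same contract, handy for assertions in tests.
class MemorySink implements StepSink {
  readonly records: StepRecord[] = [];

  async emit(record: Readonly<StepRecord>): Promise<void> {
    this.records.push({ ...record });
  }
}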
package/tests/step1_compliance.test.ts
ADDED
@@ -0,0 +1,192 @@
import { beforeEach, test, expect, vi } from "vitest";
import path from "node:path";
import os from "node:os";
import fs from "node:fs/promises";
import { initLogSDK } from "../src/index";
import { createFileNdjsonSink } from "../src/sinks/file_ndjson_sink";
import { resetContextForTests } from "../src/core/context";
import * as spool from "../src/core/spool";
import {
  AUTHORITATIVE_FAILURE_SWALLOWED,
  CONTEXT_INJECTION_VIOLATION,
  NO_AUTHORITATIVE_SINK,
  PAYLOAD_EMBEDDING_VIOLATION,
  NonComplianceError,
} from "../src/validate/noncompliance";

const baseSystem = {
  institution: "UnifyPlane",
  system_name: "LogSDK",
  system_type: "service",
  environment: "test",
  system_version: "1.0.0",
  instance_id: "unit-test",
};

async function expectNonCompliance(
  action: Promise<void>,
  code: string
): Promise<void> {
  await expect(action).rejects.toMatchObject({ code });
}

beforeEach(() => {
  resetContextForTests();
});

test("fails when no authoritative sink configured", async () => {
  const log = initLogSDK({
    context: { build: "test" },
    system: baseSystem,
    sinks: [
      {
        sinkClass: "observability",
        sink: {
          emit() {
            // no-op
          },
        },
      },
    ],
  });

  await expectNonCompliance(log.step("ok"), NO_AUTHORITATIVE_SINK);
});

test("fails when message contains JSON", async () => {
  const log = initLogSDK({
    context: { build: "test" },
    system: baseSystem,
    sinks: [
      {
        sinkClass: "authoritative",
        sink: {
          emit() {
            // no-op
          },
        },
      },
    ],
  });

  await expectNonCompliance(
    log.step('{"payload":true}'),
    PAYLOAD_EMBEDDING_VIOLATION
  );
});

test("fails on per-step context injection attempt", async () => {
  const log = initLogSDK({
    context: { build: "test" },
    system: baseSystem,
    sinks: [
      {
        sinkClass: "authoritative",
        sink: {
          emit() {
            // no-op
          },
        },
      },
    ],
  });

  const unsafeStep = log.step as (message: unknown) => Promise<void>;
  await expectNonCompliance(
    unsafeStep({ message: "step", context: { injected: true } }),
    CONTEXT_INJECTION_VIOLATION
  );
});

test("degrades when authoritative sink emit throws", async () => {
  const log = initLogSDK({
    context: { build: "test" },
    system: baseSystem,
    sinks: [
      {
        sinkClass: "authoritative",
        sink: {
          emit() {
            throw new Error("boom");
          },
        },
      },
    ],
  });
  const spoolPath = path.join(
    os.tmpdir(),
    `logsdk-step1-spool-${Date.now()}-${Math.random()}.ndjson`
  );
  const previousSpool = process.env.LOGSDK_EMERGENCY_SPOOL_PATH;
  process.env.LOGSDK_EMERGENCY_SPOOL_PATH = spoolPath;

  try {
    await log.step("step");

    const content = await fs.readFile(spoolPath, "utf8");
    expect(content).toContain("step");
  } finally {
    await fs.rm(spoolPath, { force: true });
    if (previousSpool === undefined) {
      delete process.env.LOGSDK_EMERGENCY_SPOOL_PATH;
    } else {
      process.env.LOGSDK_EMERGENCY_SPOOL_PATH = previousSpool;
    }
  }
});

test("fails when emergency spool cannot persist authoritative record", async () => {
  const log = initLogSDK({
    context: { build: "test" },
    system: baseSystem,
    sinks: [
      {
        sinkClass: "authoritative",
        sink: {
          emit() {
            throw new Error("boom");
          },
        },
      },
    ],
  });

  const spy = vi
    .spyOn(spool, "writeEmergencySpool")
    .mockRejectedValue(new Error("spool failure"));

  try {
    await expectNonCompliance(log.step("step"), AUTHORITATIVE_FAILURE_SWALLOWED);
  } finally {
    spy.mockRestore();
  }
});

test("writes NDJSON for valid step", async () => {
  const filePath = path.join(os.tmpdir(), `logsdk-step1-${Date.now()}.ndjson`);
  const fileSink = createFileNdjsonSink(filePath);

  const log = initLogSDK({
    context: { build: "test" },
    system: baseSystem,
    sinks: [
      {
        sinkClass: "authoritative",
        sink: fileSink,
      },
    ],
  });

  await log.step("First step");
  await log.step("Second step");

  const content = await fs.readFile(filePath, "utf8");
  const lines = content.split(/\r?\n/).filter(Boolean);

  expect(lines.length).toBe(2);
  const records = lines.map((line: string) => JSON.parse(line) as { record_version: string; message: string });
  expect(records[0].record_version).toBe("log.step.v1");
  expect(records[1].record_version).toBe("log.step.v1");
  expect(records[0].message).toBe("First step");
  expect(records[1].message).toBe("Second step");
});
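Taken together, the compliance tests above document the intended calling pattern. A minimal application-side sketch follows, assuming the package entry point re-exports the same surface the tests reach via ../src/index and ../src/sinks/file_ndjson_sink; the service names and paths are placeholder values, not taken from the package.

// Hedged usage sketch; the import path assumes the main entry re-exports initLogSDK
// and createFileNdjsonSink (not verified in this excerpt).
import { initLogSDK, createFileNdjsonSink } from "@unifyplane/logsdk";

const log = initLogSDK({
  // Context is fixed at initialization; per-step context injection is rejected
  // with CONTEXT_INJECTION_VIOLATION (see the test above).
  context: { build: "example-build" },
  system: {
    institution: "UnifyPlane",
    system_name: "ExampleService", // placeholder values
    system_type: "service",
    environment: "production",
    system_version: "1.0.0",
    instance_id: "instance-01",
  },
  // At least one authoritative sink is required; otherwise step() rejects
  // with NO_AUTHORITATIVE_SINK.
  sinks: [
    { sinkClass: "authoritative", sink: createFileNdjsonSink("/tmp/example-steps.ndjson") },
  ],
});

// Messages must be plain prose; embedding JSON triggers PAYLOAD_EMBEDDING_VIOLATION.
await log.step("Order accepted");
await log.step("Order fulfilled");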
package/tools/test_results/generate-test-traceability.js
ADDED
@@ -0,0 +1,60 @@
#!/usr/bin/env node

/**
 * Generates test-traceability.json from Jest JSON output
 * Mode: READ-ONLY / AUDIT-SAFE
 */

import fs from "fs";
import path from "path";

const RESULTS_PATH = path.resolve("evidence/test_results/test-results.json");
const MAP_PATH = path.resolve("tools/test_results/test-case-map.json");
const OUTPUT_PATH = path.resolve("evidence/test_results/test-traceability.json");

if (!fs.existsSync(RESULTS_PATH)) {
  throw new Error(`Missing Jest results: ${RESULTS_PATH}`);
}
if (!fs.existsSync(MAP_PATH)) {
  throw new Error(`Missing test case map: ${MAP_PATH}`);
}

const jestResults = JSON.parse(fs.readFileSync(RESULTS_PATH, "utf8"));
const testCaseMap = JSON.parse(fs.readFileSync(MAP_PATH, "utf8"));

const executedTests = new Map();

/**
 * Collect per-test-file pass/fail
 */
for (const suite of jestResults.testResults || []) {
  const file = path.basename(suite.name);
  const status = suite.status === "passed" ? "PASS" : "FAIL";
  executedTests.set(file, status);
}

const runId = new Date().toISOString();

const traceability = {};

for (const [tcId, files] of Object.entries(testCaseMap)) {
  const statuses = files.map(f => executedTests.get(f) || "NOT_RUN");

  let overall = "PASS";
  if (statuses.includes("FAIL")) overall = "FAIL";
  else if (statuses.includes("NOT_RUN")) overall = "INCOMPLETE";

  traceability[tcId] = {
    tests: files,
    status: overall,
    run_id: runId
  };
}

fs.writeFileSync(
  OUTPUT_PATH,
  JSON.stringify(traceability, null, 2),
  "utf8"
);

console.log(`✔ Test traceability written to ${OUTPUT_PATH}`);
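For reference, the shape this script writes to evidence/test_results/test-traceability.json, sketched as TypeScript types inferred from the loop above; the script itself is plain JavaScript, so these declarations are illustrative only and not part of the package.

// Illustrative types only, inferred from generate-test-traceability.js above.
type TraceabilityStatus = "PASS" | "FAIL" | "INCOMPLETE";

interface TraceabilityEntry {
  tests: string[];            // test files mapped to this test case in test-case-map.json
  status: TraceabilityStatus; // FAIL wins over INCOMPLETE; otherwise PASS
  run_id: string;             // ISO-8601 timestamp shared by all entries of one run
}

// The report is keyed by test-case id, e.g. "TC-SNK-001".
type TraceabilityReport = Record<string, TraceabilityEntry>;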
package/tools/test_results/normalize-test-results.js
ADDED
@@ -0,0 +1,57 @@
#!/usr/bin/env node

import fs from "fs";
import path from "path";

const RESULTS_PATH = path.resolve("evidence/test_results/test-results.json");

if (!fs.existsSync(RESULTS_PATH)) {
  console.error(`Missing test results: ${RESULTS_PATH}`);
  process.exit(1);
}

const raw = fs.readFileSync(RESULTS_PATH, "utf8");
let data;
try {
  data = JSON.parse(raw);
} catch (e) {
  console.error("Invalid JSON in test-results.json");
  process.exit(2);
}

const numTotalTestSuites = data.numTotalTestSuites ?? 0;
const numPassedTestSuites = data.numPassedTestSuites ?? 0;
const numFailedTestSuites = data.numFailedTestSuites ?? 0;
const numPendingTestSuites = data.numPendingTestSuites ?? 0;

const numTotalTests = data.numTotalTests ?? 0;
const numPassedTests = data.numPassedTests ?? 0;
const numFailedTests = data.numFailedTests ?? 0;
const numPendingTests = data.numPendingTests ?? 0;
const numTodoTests = data.numTodoTests ?? 0;

const overall_status = (numFailedTestSuites > 0 || numFailedTests > 0) ? "FAILED" : "PASSED";

const summary = {
  overall_status,
  totals: {
    test_suites: {
      total: numTotalTestSuites,
      passed: numPassedTestSuites,
      failed: numFailedTestSuites,
      pending: numPendingTestSuites,
    },
    tests: {
      total: numTotalTests,
      passed: numPassedTests,
      failed: numFailedTests,
      pending: numPendingTests,
      todo: numTodoTests,
    },
  },
};

data.summary = summary;

fs.writeFileSync(RESULTS_PATH, JSON.stringify(data, null, 2) + "\n", "utf8");
console.log(`✔ Normalized and updated ${RESULTS_PATH}`);
package/tools/test_results/run-tests-then-prebuild.js
ADDED
@@ -0,0 +1,103 @@
#!/usr/bin/env node

import { execSync } from "child_process";
import fs from "fs";
import path from "path";

function run(cmd) {
  console.log(`→ ${cmd}`);
  execSync(cmd, { stdio: "inherit", shell: true });
}

/* -----------------------------
 * Paths
 * --------------------------- */
const STRUCTURE_AUDIT =
  "evidence/validators/validate-repo-structure/repo-structure-audit.json";

const TEST_RESULTS_DIR = "evidence/test_results";
const TEST_RESULTS = `${TEST_RESULTS_DIR}/test-results.json`;

/* -----------------------------
 * Pipeline
 * --------------------------- */
try {
  /* 0. Run repo-structure validator */
  run("node validators/bootstrap/validate-repo-structure.ts --root .");

  if (!fs.existsSync(STRUCTURE_AUDIT)) {
    console.error("✖ Missing repo-structure-audit.json");
    process.exit(2);
  }

  const structureAudit = JSON.parse(
    fs.readFileSync(STRUCTURE_AUDIT, "utf8")
  );
  const summary = structureAudit?.summary;
  const riskLevel = summary?.risk_level;

  if (!summary || summary.overall_status !== "COMPLIANT") {
    console.error(
      "✖ Repo structure validation not COMPLIANT. Aborting."
    );
    process.exit(3);
  }

  if (riskLevel !== "LOW") {
    console.error(
      `✖ Repo structure risk_level=${riskLevel ?? "UNKNOWN"}. Aborting.`
    );
    process.exit(3);
  }

  console.log("✔ Repo structure validation PASSED (risk_level=LOW)");

  /* 1. Ensure test evidence directory exists */
  fs.mkdirSync(TEST_RESULTS_DIR, { recursive: true });

  /* 2. Run tests */
  run(
    `npx vitest run --reporter=json --silent > ${TEST_RESULTS}`
  );

  if (!fs.existsSync(TEST_RESULTS)) {
    console.error("✖ Missing test-results.json");
    process.exit(4);
  }

  const testResults = JSON.parse(
    fs.readFileSync(TEST_RESULTS, "utf8")
  );

  if (testResults.success !== true) {
    console.error("✖ Tests did not pass (success=false).");
    process.exit(5);
  }

  console.log("✔ Tests passed (success=true)");

  /* 3. Normalize + traceability */
  run("node tools/test_results/normalize-test-results.js");
  run("node tools/test_results/generate-test-traceability.js");

  /* 4. Run prebuild if defined */
  let hasPrebuild = false;
  try {
    const pkg = JSON.parse(fs.readFileSync("package.json", "utf8"));
    hasPrebuild = !!pkg?.scripts?.prebuild;
  } catch {
    console.warn("⚠ Unable to read package.json; skipping prebuild.");
  }

  if (hasPrebuild) {
    run("npm run prebuild");
    console.log("✔ Prebuild completed successfully");
  } else {
    console.log("ℹ No prebuild script found; skipping prebuild.");
  }

  console.log("✔ VERIFY → TEST → PREBUILD pipeline completed successfully");
} catch (err) {
  console.error("✖ Pipeline failed");
  process.exit(1);
}
package/tools/test_results/test-case-map.json
ADDED
@@ -0,0 +1,9 @@
{
  "TC-SEQ-001": ["sequence_monotonic.test.ts"],
  "TC-CLK-001": ["sequence_monotonic.test.ts"],
  "TC-REC-001": ["record_builder.test.ts", "step1_compliance.test.ts"],
  "TC-FAN-001": ["fanout.test.ts"],
  "TC-SPL-001": ["fanout_spool.test.ts"],
  "TC-MSG-001": ["message_constraints.test.ts"],
  "TC-SNK-001": ["sinks_file_ndjson.test.ts"]
}
package/tsconfig.json
ADDED
@@ -0,0 +1,31 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "ES2022",
    "moduleResolution": "Node",
    "strict": true,
    "rootDir": "src",
    "outDir": "dist",
    "declaration": true,
    "declarationMap": true,
    "sourceMap": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "skipLibCheck": true,
    "noEmitOnError": true,
    "types": [
      "node"
    ],
    "lib": [
      "ES2022"
    ]
  },
  "include": [
    "src/**/*.ts"
  ],
  "exclude": [
    "node_modules",
    "dist",
    "**/*.js"
  ]
}