@mcptoolshop/a11y-evidence-engine 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +53 -0
- package/CODE_OF_CONDUCT.md +129 -0
- package/CONTRIBUTING.md +128 -0
- package/LICENSE +21 -0
- package/README.md +71 -0
- package/bin/a11y-engine.js +11 -0
- package/fixtures/bad/button-no-name.html +30 -0
- package/fixtures/bad/img-missing-alt.html +19 -0
- package/fixtures/bad/input-missing-label.html +26 -0
- package/fixtures/bad/missing-lang.html +11 -0
- package/fixtures/good/index.html +29 -0
- package/package.json +44 -0
- package/src/cli.js +74 -0
- package/src/evidence/canonicalize.js +52 -0
- package/src/evidence/json_pointer.js +34 -0
- package/src/evidence/prov_emit.js +153 -0
- package/src/fswalk.js +56 -0
- package/src/html_parse.js +117 -0
- package/src/ids.js +53 -0
- package/src/rules/document_missing_lang.js +50 -0
- package/src/rules/form_control_missing_label.js +105 -0
- package/src/rules/img_missing_alt.js +77 -0
- package/src/rules/index.js +37 -0
- package/src/rules/interactive_missing_name.js +129 -0
- package/src/scan.js +128 -0
- package/test/scan.test.js +149 -0
- package/test/vectors.test.js +200 -0
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const imgMissingAlt = require("./img_missing_alt.js");
|
|
4
|
+
const formControlMissingLabel = require("./form_control_missing_label.js");
|
|
5
|
+
const interactiveMissingName = require("./interactive_missing_name.js");
|
|
6
|
+
const documentMissingLang = require("./document_missing_lang.js");
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* All available rules.
|
|
10
|
+
* Each rule exports: { id, run(nodes, context) => findings[] }
|
|
11
|
+
*/
|
|
12
|
+
// Registry of every available rule module, in execution order.
// Each entry exports { id, run(nodes, context) => findings[] }.
const rules = [
  documentMissingLang,
  imgMissingAlt,
  formControlMissingLabel,
  interactiveMissingName,
];
|
|
18
|
+
|
|
19
|
+
/**
 * Run all rules against parsed HTML nodes.
 *
 * @param {Array} nodes - Flat nodes array from parseHtml
 * @param {Object} context - { filePath, relativePath }
 * @returns {Array} Raw findings (without finding_id assigned)
 */
function runRules(nodes, context) {
  // Each rule returns an array of findings; concatenate them in rule order.
  return rules.flatMap((rule) => rule.run(nodes, context));
}
|
|
36
|
+
|
|
37
|
+
// Public surface: the ordered rule list plus the aggregate runner.
module.exports = { rules, runRules };
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const { findElements, getTextContent, getElementById } = require("../html_parse.js");
|
|
4
|
+
|
|
5
|
+
const RULE_ID = "html.interactive.missing_name";

/**
 * Check for interactive elements missing accessible names.
 * WCAG 4.1.2 - Name, Role, Value (Level A)
 *
 * Checks:
 * - <button> elements
 * - <a href="..."> elements (links)
 *
 * An accessible name can come from:
 * - Text content
 * - aria-label
 * - aria-labelledby
 * - title attribute (fallback)
 *
 * @param {Array} nodes - Flat nodes array from parseHtml
 * @param {Object} context - { filePath, relativePath }
 * @returns {Array} Raw findings (without finding_id assigned)
 */
function run(nodes, context) {
  const findings = [];

  // Shared reporter: the original button and link branches were identical
  // except for the message prefix and the evidence attribute whitelist.
  const report = (node, elementLabel, evidenceKeys) => {
    findings.push({
      rule_id: RULE_ID,
      severity: "error",
      confidence: 0.95,
      message: `${elementLabel} element is missing an accessible name.`,
      location: {
        file: context.relativePath,
        json_pointer: `/nodes/${node.index}`,
      },
      evidence: {
        tagName: node.tagName,
        attrs: filterAttrs(node.attrs, evidenceKeys),
        // Truncated so evidence stays compact for text-heavy elements.
        textContent: getTextContent(node).substring(0, 50),
      },
    });
  };

  // Check buttons
  const buttons = findElements(nodes, (n) => n.tagName === "button");
  for (const node of buttons) {
    if (!hasAccessibleName(node, nodes)) {
      report(node, "Button", ["id", "type", "aria-label", "aria-labelledby", "title"]);
    }
  }

  // Check links (anchors with href) — an <a> without href is not interactive.
  const links = findElements(
    nodes,
    (n) => n.tagName === "a" && n.attrs.href !== undefined
  );
  for (const node of links) {
    if (!hasAccessibleName(node, nodes)) {
      report(node, "Link", ["href", "aria-label", "aria-labelledby", "title"]);
    }
  }

  return findings;
}
|
|
84
|
+
|
|
85
|
+
/**
 * Check if an element has an accessible name.
 *
 * Name sources, in the order they are consulted: a non-blank aria-label,
 * any aria-labelledby target with non-blank text, the element's own text
 * content, and finally a non-blank title attribute.
 *
 * @param {Object} node - Parsed element node
 * @param {Array} nodes - Full node list (for aria-labelledby lookups)
 * @returns {boolean} True when any name source yields non-blank text
 */
function hasAccessibleName(node, nodes) {
  const attrs = node.attrs;

  // Explicit aria-label wins when it is non-blank.
  const ariaLabel = attrs["aria-label"];
  if (ariaLabel && ariaLabel.trim()) {
    return true;
  }

  // aria-labelledby may list several ids; any referenced element
  // with non-blank text supplies a name.
  const labelledby = attrs["aria-labelledby"];
  if (labelledby) {
    const named = labelledby.split(/\s+/).some((id) => {
      const target = getElementById(nodes, id);
      return Boolean(target && getTextContent(target).trim());
    });
    if (named) {
      return true;
    }
  }

  // The element's own text content.
  if (getTextContent(node).trim()) {
    return true;
  }

  // title attribute is the last-resort fallback.
  const title = attrs.title;
  return Boolean(title && title.trim());
}
|
|
118
|
+
|
|
119
|
+
/**
 * Keep only the listed attribute keys from an attrs map.
 *
 * @param {Object} attrs - Attribute map from a parsed node
 * @param {string[]} keys - Attribute names worth keeping as evidence
 * @returns {Object} New object containing only the requested keys present in attrs
 */
function filterAttrs(attrs, keys) {
  const present = keys.filter((key) => key in attrs);
  return Object.fromEntries(present.map((key) => [key, attrs[key]]));
}
|
|
128
|
+
|
|
129
|
+
// Standard rule-module shape consumed by rules/index.js: { id, run }.
module.exports = { id: RULE_ID, run };
|
package/src/scan.js
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const fs = require("fs");
|
|
4
|
+
const path = require("path");
|
|
5
|
+
const { walkHtmlFiles } = require("./fswalk.js");
|
|
6
|
+
const { parseHtml } = require("./html_parse.js");
|
|
7
|
+
const { runRules } = require("./rules/index.js");
|
|
8
|
+
const { assignFindingIds } = require("./ids.js");
|
|
9
|
+
const { emitProvenance } = require("./evidence/prov_emit.js");
|
|
10
|
+
|
|
11
|
+
// Engine identity embedded in findings.json and in every provenance record.
const ENGINE_NAME = "a11y-evidence-engine";
// NOTE(review): the published package is 0.2.0 but this reports "0.1.0", and
// the test suite pins "0.1.0" — confirm whether this constant should track
// package.json's version before changing either.
const ENGINE_VERSION = "0.1.0";
|
|
13
|
+
|
|
14
|
+
/**
 * Scan HTML files and produce findings with provenance.
 *
 * Side effects: creates outDir (recursively), writes one
 * provenance/<finding_id>/{record,digest,envelope}.json trio per finding,
 * and writes findings.json at the root of outDir.
 *
 * @param {string} targetPath - File or directory to scan
 * @param {string} outDir - Output directory
 * @returns {Object} Scan result with summary (also written to findings.json)
 * @throws {Error} When no HTML files are found under targetPath
 */
async function scan(targetPath, outDir) {
  const target = path.resolve(targetPath);
  const out = path.resolve(outDir);

  // Gather HTML files up front; an empty target is a hard error.
  const htmlFiles = walkHtmlFiles(target);
  if (htmlFiles.length === 0) {
    throw new Error(`No HTML files found in: ${target}`);
  }

  // Relative paths in findings are computed against the scanned directory
  // (or the containing directory when a single file was given).
  const baseDir = fs.statSync(target).isDirectory()
    ? target
    : path.dirname(target);

  // Parse each file and run every rule over it, accumulating raw findings.
  const rawFindings = [];
  for (const filePath of htmlFiles) {
    const { nodes } = parseHtml(fs.readFileSync(filePath, "utf8"));
    rawFindings.push(
      ...runRules(nodes, {
        filePath,
        relativePath: path.relative(baseDir, filePath).replace(/\\/g, "/"),
      })
    );
  }

  // Deterministic finding_id assignment across the whole scan.
  const numbered = assignFindingIds(rawFindings);

  fs.mkdirSync(out, { recursive: true });

  // One timestamp shared by every provenance record of this scan.
  const timestamp = new Date().toISOString();

  // Pretty-printed JSON writer used for every artifact.
  const writeJson = (filePath, value) => {
    fs.writeFileSync(filePath, JSON.stringify(value, null, 2));
  };

  // Emit provenance per finding and swap inline evidence for file references.
  const findingsWithRefs = numbered.map((finding) => {
    const provDir = path.join(out, "provenance", finding.finding_id);
    fs.mkdirSync(provDir, { recursive: true });

    const { record, digest, envelope } = emitProvenance(finding, {
      engineVersion: ENGINE_VERSION,
      timestamp,
    });

    writeJson(path.join(provDir, "record.json"), record);
    writeJson(path.join(provDir, "digest.json"), digest);
    writeJson(path.join(provDir, "envelope.json"), envelope);

    // Drop the raw evidence from the reported finding; point at the files.
    const { evidence, ...rest } = finding;
    return {
      ...rest,
      evidence_ref: {
        record: `provenance/${finding.finding_id}/record.json`,
        digest: `provenance/${finding.finding_id}/digest.json`,
        envelope: `provenance/${finding.finding_id}/envelope.json`,
      },
    };
  });

  const countBySeverity = (severity) =>
    findingsWithRefs.filter((f) => f.severity === severity).length;

  const output = {
    engine: ENGINE_NAME,
    version: ENGINE_VERSION,
    target: {
      path: path.relative(process.cwd(), target).replace(/\\/g, "/") || ".",
    },
    summary: {
      files_scanned: htmlFiles.length,
      errors: countBySeverity("error"),
      warnings: countBySeverity("warning"),
      info: countBySeverity("info"),
    },
    findings: findingsWithRefs,
  };

  writeJson(path.join(out, "findings.json"), output);

  return output;
}
|
|
127
|
+
|
|
128
|
+
// Sole public entry point, used by both the CLI and the test suites.
module.exports = { scan };
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const { describe, it, before, after } = require("node:test");
|
|
4
|
+
const assert = require("node:assert");
|
|
5
|
+
const fs = require("fs");
|
|
6
|
+
const path = require("path");
|
|
7
|
+
const { scan } = require("../src/scan.js");
|
|
8
|
+
|
|
9
|
+
const FIXTURES_DIR = path.join(__dirname, "..", "fixtures");
|
|
10
|
+
const TEST_OUT_DIR = path.join(__dirname, "..", "test-output");
|
|
11
|
+
|
|
12
|
+
describe("scan", () => {
|
|
13
|
+
before(() => {
|
|
14
|
+
// Clean up test output
|
|
15
|
+
if (fs.existsSync(TEST_OUT_DIR)) {
|
|
16
|
+
fs.rmSync(TEST_OUT_DIR, { recursive: true });
|
|
17
|
+
}
|
|
18
|
+
});
|
|
19
|
+
|
|
20
|
+
after(() => {
|
|
21
|
+
// Clean up test output
|
|
22
|
+
if (fs.existsSync(TEST_OUT_DIR)) {
|
|
23
|
+
fs.rmSync(TEST_OUT_DIR, { recursive: true });
|
|
24
|
+
}
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
describe("good fixtures", () => {
|
|
28
|
+
it("should find no errors in accessible HTML", async () => {
|
|
29
|
+
const outDir = path.join(TEST_OUT_DIR, "good");
|
|
30
|
+
const result = await scan(path.join(FIXTURES_DIR, "good"), outDir);
|
|
31
|
+
|
|
32
|
+
assert.strictEqual(result.summary.errors, 0);
|
|
33
|
+
assert.strictEqual(result.findings.length, 0);
|
|
34
|
+
});
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
describe("bad fixtures", () => {
|
|
38
|
+
it("should detect missing alt text", async () => {
|
|
39
|
+
const outDir = path.join(TEST_OUT_DIR, "img-missing-alt");
|
|
40
|
+
const result = await scan(
|
|
41
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
42
|
+
outDir
|
|
43
|
+
);
|
|
44
|
+
|
|
45
|
+
assert.strictEqual(result.summary.errors, 2);
|
|
46
|
+
|
|
47
|
+
const altFindings = result.findings.filter(
|
|
48
|
+
(f) => f.rule_id === "html.img.missing_alt"
|
|
49
|
+
);
|
|
50
|
+
assert.strictEqual(altFindings.length, 2);
|
|
51
|
+
|
|
52
|
+
// Check evidence_ref exists
|
|
53
|
+
for (const finding of altFindings) {
|
|
54
|
+
assert.ok(finding.evidence_ref);
|
|
55
|
+
assert.ok(finding.evidence_ref.record);
|
|
56
|
+
assert.ok(finding.evidence_ref.digest);
|
|
57
|
+
assert.ok(finding.evidence_ref.envelope);
|
|
58
|
+
}
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
it("should detect missing labels", async () => {
|
|
62
|
+
const outDir = path.join(TEST_OUT_DIR, "input-missing-label");
|
|
63
|
+
const result = await scan(
|
|
64
|
+
path.join(FIXTURES_DIR, "bad", "input-missing-label.html"),
|
|
65
|
+
outDir
|
|
66
|
+
);
|
|
67
|
+
|
|
68
|
+
const labelFindings = result.findings.filter(
|
|
69
|
+
(f) => f.rule_id === "html.form_control.missing_label"
|
|
70
|
+
);
|
|
71
|
+
assert.strictEqual(labelFindings.length, 2);
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
it("should detect missing accessible names", async () => {
|
|
75
|
+
const outDir = path.join(TEST_OUT_DIR, "button-no-name");
|
|
76
|
+
const result = await scan(
|
|
77
|
+
path.join(FIXTURES_DIR, "bad", "button-no-name.html"),
|
|
78
|
+
outDir
|
|
79
|
+
);
|
|
80
|
+
|
|
81
|
+
const nameFindings = result.findings.filter(
|
|
82
|
+
(f) => f.rule_id === "html.interactive.missing_name"
|
|
83
|
+
);
|
|
84
|
+
// 2 empty buttons + 1 empty link = 3
|
|
85
|
+
assert.strictEqual(nameFindings.length, 3);
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
it("should detect missing lang attribute", async () => {
|
|
89
|
+
const outDir = path.join(TEST_OUT_DIR, "missing-lang");
|
|
90
|
+
const result = await scan(
|
|
91
|
+
path.join(FIXTURES_DIR, "bad", "missing-lang.html"),
|
|
92
|
+
outDir
|
|
93
|
+
);
|
|
94
|
+
|
|
95
|
+
const langFindings = result.findings.filter(
|
|
96
|
+
(f) => f.rule_id === "html.document.missing_lang"
|
|
97
|
+
);
|
|
98
|
+
assert.strictEqual(langFindings.length, 1);
|
|
99
|
+
});
|
|
100
|
+
});
|
|
101
|
+
|
|
102
|
+
describe("output structure", () => {
|
|
103
|
+
it("should produce findings.json with correct structure", async () => {
|
|
104
|
+
const outDir = path.join(TEST_OUT_DIR, "structure");
|
|
105
|
+
const result = await scan(
|
|
106
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
107
|
+
outDir
|
|
108
|
+
);
|
|
109
|
+
|
|
110
|
+
// Check top-level structure
|
|
111
|
+
assert.strictEqual(result.engine, "a11y-evidence-engine");
|
|
112
|
+
assert.strictEqual(result.version, "0.1.0");
|
|
113
|
+
assert.ok(result.target);
|
|
114
|
+
assert.ok(result.summary);
|
|
115
|
+
assert.ok(Array.isArray(result.findings));
|
|
116
|
+
|
|
117
|
+
// Check findings.json was written
|
|
118
|
+
const findingsPath = path.join(outDir, "findings.json");
|
|
119
|
+
assert.ok(fs.existsSync(findingsPath));
|
|
120
|
+
|
|
121
|
+
const written = JSON.parse(fs.readFileSync(findingsPath, "utf8"));
|
|
122
|
+
assert.deepStrictEqual(written, result);
|
|
123
|
+
});
|
|
124
|
+
|
|
125
|
+
it("should assign deterministic finding IDs", async () => {
|
|
126
|
+
const outDir = path.join(TEST_OUT_DIR, "deterministic");
|
|
127
|
+
const result = await scan(
|
|
128
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
129
|
+
outDir
|
|
130
|
+
);
|
|
131
|
+
|
|
132
|
+
assert.strictEqual(result.findings[0].finding_id, "finding-0001");
|
|
133
|
+
assert.strictEqual(result.findings[1].finding_id, "finding-0002");
|
|
134
|
+
});
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
describe("directory scanning", () => {
|
|
138
|
+
it("should scan all HTML files in a directory", async () => {
|
|
139
|
+
const outDir = path.join(TEST_OUT_DIR, "all-bad");
|
|
140
|
+
const result = await scan(path.join(FIXTURES_DIR, "bad"), outDir);
|
|
141
|
+
|
|
142
|
+
// Should scan all 4 bad fixture files
|
|
143
|
+
assert.strictEqual(result.summary.files_scanned, 4);
|
|
144
|
+
|
|
145
|
+
// Should find issues from all files
|
|
146
|
+
assert.ok(result.summary.errors > 0);
|
|
147
|
+
});
|
|
148
|
+
});
|
|
149
|
+
});
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
const { describe, it, before, after } = require("node:test");
|
|
4
|
+
const assert = require("node:assert");
|
|
5
|
+
const crypto = require("crypto");
|
|
6
|
+
const fs = require("fs");
|
|
7
|
+
const path = require("path");
|
|
8
|
+
const { scan } = require("../src/scan.js");
|
|
9
|
+
const { canonicalize } = require("../src/evidence/canonicalize.js");
|
|
10
|
+
|
|
11
|
+
const FIXTURES_DIR = path.join(__dirname, "..", "fixtures");
|
|
12
|
+
const TEST_OUT_DIR = path.join(__dirname, "..", "test-output-vectors");
|
|
13
|
+
|
|
14
|
+
describe("provenance vectors", () => {
|
|
15
|
+
before(() => {
|
|
16
|
+
if (fs.existsSync(TEST_OUT_DIR)) {
|
|
17
|
+
fs.rmSync(TEST_OUT_DIR, { recursive: true });
|
|
18
|
+
}
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
after(() => {
|
|
22
|
+
if (fs.existsSync(TEST_OUT_DIR)) {
|
|
23
|
+
fs.rmSync(TEST_OUT_DIR, { recursive: true });
|
|
24
|
+
}
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
describe("provenance file emission", () => {
|
|
28
|
+
it("should emit record.json, digest.json, envelope.json for each finding", async () => {
|
|
29
|
+
const outDir = path.join(TEST_OUT_DIR, "prov-files");
|
|
30
|
+
const result = await scan(
|
|
31
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
32
|
+
outDir
|
|
33
|
+
);
|
|
34
|
+
|
|
35
|
+
for (const finding of result.findings) {
|
|
36
|
+
const provDir = path.join(outDir, "provenance", finding.finding_id);
|
|
37
|
+
|
|
38
|
+
assert.ok(fs.existsSync(path.join(provDir, "record.json")));
|
|
39
|
+
assert.ok(fs.existsSync(path.join(provDir, "digest.json")));
|
|
40
|
+
assert.ok(fs.existsSync(path.join(provDir, "envelope.json")));
|
|
41
|
+
}
|
|
42
|
+
});
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
describe("digest verification", () => {
|
|
46
|
+
it("should produce verifiable SHA-256 digests", async () => {
|
|
47
|
+
const outDir = path.join(TEST_OUT_DIR, "digest-verify");
|
|
48
|
+
const result = await scan(
|
|
49
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
50
|
+
outDir
|
|
51
|
+
);
|
|
52
|
+
|
|
53
|
+
for (const finding of result.findings) {
|
|
54
|
+
const provDir = path.join(outDir, "provenance", finding.finding_id);
|
|
55
|
+
|
|
56
|
+
// Read the record and digest
|
|
57
|
+
const record = JSON.parse(
|
|
58
|
+
fs.readFileSync(path.join(provDir, "record.json"), "utf8")
|
|
59
|
+
);
|
|
60
|
+
const digest = JSON.parse(
|
|
61
|
+
fs.readFileSync(path.join(provDir, "digest.json"), "utf8")
|
|
62
|
+
);
|
|
63
|
+
|
|
64
|
+
// Extract evidence from record
|
|
65
|
+
const evidence =
|
|
66
|
+
record["prov.record.v0.1"].outputs[0]["artifact.v0.1"].content;
|
|
67
|
+
|
|
68
|
+
// Extract expected digest
|
|
69
|
+
const expectedDigest =
|
|
70
|
+
digest["prov.record.v0.1"].outputs[0]["artifact.v0.1"].digest.value;
|
|
71
|
+
|
|
72
|
+
// Compute actual digest
|
|
73
|
+
const canonical = canonicalize(evidence);
|
|
74
|
+
const actualDigest = crypto
|
|
75
|
+
.createHash("sha256")
|
|
76
|
+
.update(canonical, "utf8")
|
|
77
|
+
.digest("hex");
|
|
78
|
+
|
|
79
|
+
assert.strictEqual(
|
|
80
|
+
actualDigest,
|
|
81
|
+
expectedDigest,
|
|
82
|
+
`Digest mismatch for ${finding.finding_id}`
|
|
83
|
+
);
|
|
84
|
+
}
|
|
85
|
+
});
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
describe("record structure", () => {
|
|
89
|
+
it("should emit valid engine.extract.evidence.json_pointer records", async () => {
|
|
90
|
+
const outDir = path.join(TEST_OUT_DIR, "record-structure");
|
|
91
|
+
const result = await scan(
|
|
92
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
93
|
+
outDir
|
|
94
|
+
);
|
|
95
|
+
|
|
96
|
+
const provDir = path.join(
|
|
97
|
+
outDir,
|
|
98
|
+
"provenance",
|
|
99
|
+
result.findings[0].finding_id
|
|
100
|
+
);
|
|
101
|
+
const record = JSON.parse(
|
|
102
|
+
fs.readFileSync(path.join(provDir, "record.json"), "utf8")
|
|
103
|
+
);
|
|
104
|
+
|
|
105
|
+
const prov = record["prov.record.v0.1"];
|
|
106
|
+
|
|
107
|
+
assert.strictEqual(prov.method_id, "engine.extract.evidence.json_pointer");
|
|
108
|
+
assert.ok(prov.timestamp);
|
|
109
|
+
assert.ok(prov.inputs);
|
|
110
|
+
assert.ok(prov.outputs);
|
|
111
|
+
assert.ok(prov.agent);
|
|
112
|
+
assert.strictEqual(prov.agent.name, "a11y-evidence-engine");
|
|
113
|
+
});
|
|
114
|
+
|
|
115
|
+
it("should emit valid integrity.digest.sha256 records", async () => {
|
|
116
|
+
const outDir = path.join(TEST_OUT_DIR, "digest-structure");
|
|
117
|
+
const result = await scan(
|
|
118
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
119
|
+
outDir
|
|
120
|
+
);
|
|
121
|
+
|
|
122
|
+
const provDir = path.join(
|
|
123
|
+
outDir,
|
|
124
|
+
"provenance",
|
|
125
|
+
result.findings[0].finding_id
|
|
126
|
+
);
|
|
127
|
+
const digest = JSON.parse(
|
|
128
|
+
fs.readFileSync(path.join(provDir, "digest.json"), "utf8")
|
|
129
|
+
);
|
|
130
|
+
|
|
131
|
+
const prov = digest["prov.record.v0.1"];
|
|
132
|
+
|
|
133
|
+
assert.strictEqual(prov.method_id, "integrity.digest.sha256");
|
|
134
|
+
assert.ok(prov.outputs[0]["artifact.v0.1"].digest);
|
|
135
|
+
assert.strictEqual(
|
|
136
|
+
prov.outputs[0]["artifact.v0.1"].digest.algorithm,
|
|
137
|
+
"sha256"
|
|
138
|
+
);
|
|
139
|
+
|
|
140
|
+
// Digest value should be 64 hex chars
|
|
141
|
+
const digestValue = prov.outputs[0]["artifact.v0.1"].digest.value;
|
|
142
|
+
assert.strictEqual(digestValue.length, 64);
|
|
143
|
+
assert.match(digestValue, /^[a-f0-9]+$/);
|
|
144
|
+
});
|
|
145
|
+
|
|
146
|
+
it("should emit valid adapter.wrap.envelope_v0_1 records", async () => {
|
|
147
|
+
const outDir = path.join(TEST_OUT_DIR, "envelope-structure");
|
|
148
|
+
const result = await scan(
|
|
149
|
+
path.join(FIXTURES_DIR, "bad", "img-missing-alt.html"),
|
|
150
|
+
outDir
|
|
151
|
+
);
|
|
152
|
+
|
|
153
|
+
const provDir = path.join(
|
|
154
|
+
outDir,
|
|
155
|
+
"provenance",
|
|
156
|
+
result.findings[0].finding_id
|
|
157
|
+
);
|
|
158
|
+
const envelope = JSON.parse(
|
|
159
|
+
fs.readFileSync(path.join(provDir, "envelope.json"), "utf8")
|
|
160
|
+
);
|
|
161
|
+
|
|
162
|
+
assert.ok(envelope["mcp.envelope.v0.1"]);
|
|
163
|
+
assert.ok(envelope["mcp.envelope.v0.1"].result);
|
|
164
|
+
assert.ok(envelope["mcp.envelope.v0.1"].provenance);
|
|
165
|
+
|
|
166
|
+
const prov = envelope["mcp.envelope.v0.1"].provenance["prov.record.v0.1"];
|
|
167
|
+
assert.strictEqual(prov.method_id, "adapter.wrap.envelope_v0_1");
|
|
168
|
+
});
|
|
169
|
+
});
|
|
170
|
+
|
|
171
|
+
describe("canonicalization", () => {
|
|
172
|
+
it("should canonicalize JSON correctly", () => {
|
|
173
|
+
// Test sorted keys
|
|
174
|
+
const obj = { z: 1, a: 2, m: 3 };
|
|
175
|
+
assert.strictEqual(canonicalize(obj), '{"a":2,"m":3,"z":1}');
|
|
176
|
+
|
|
177
|
+
// Test nested objects
|
|
178
|
+
const nested = { b: { d: 1, c: 2 }, a: 1 };
|
|
179
|
+
assert.strictEqual(canonicalize(nested), '{"a":1,"b":{"c":2,"d":1}}');
|
|
180
|
+
|
|
181
|
+
// Test arrays (preserve order)
|
|
182
|
+
const arr = [3, 1, 2];
|
|
183
|
+
assert.strictEqual(canonicalize(arr), "[3,1,2]");
|
|
184
|
+
|
|
185
|
+
// Test strings with escaping
|
|
186
|
+
assert.strictEqual(canonicalize('hello "world"'), '"hello \\"world\\""');
|
|
187
|
+
|
|
188
|
+
// Test null and booleans
|
|
189
|
+
assert.strictEqual(canonicalize(null), "null");
|
|
190
|
+
assert.strictEqual(canonicalize(true), "true");
|
|
191
|
+
assert.strictEqual(canonicalize(false), "false");
|
|
192
|
+
});
|
|
193
|
+
|
|
194
|
+
it("should reject non-finite numbers", () => {
|
|
195
|
+
assert.throws(() => canonicalize(Infinity));
|
|
196
|
+
assert.throws(() => canonicalize(-Infinity));
|
|
197
|
+
assert.throws(() => canonicalize(NaN));
|
|
198
|
+
});
|
|
199
|
+
});
|
|
200
|
+
});
|