theaccessible-audit-ci 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +93 -0
- package/dist/api-client.d.ts +60 -0
- package/dist/api-client.js +105 -0
- package/dist/artifact.d.ts +39 -0
- package/dist/artifact.js +80 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +228 -0
- package/dist/config.d.ts +107 -0
- package/dist/config.js +60 -0
- package/dist/exit-codes.d.ts +16 -0
- package/dist/exit-codes.js +25 -0
- package/dist/git.d.ts +14 -0
- package/dist/git.js +39 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +5 -0
- package/dist/output.d.ts +7 -0
- package/dist/output.js +37 -0
- package/dist/poll.d.ts +7 -0
- package/dist/poll.js +14 -0
- package/package.json +46 -0
package/README.md
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# theaccessible-audit-ci
|
|
2
|
+
|
|
3
|
+
CLI for TheAccessible CI/CD integration. Grades a target (URL, build artifact, or VPAT file) against WCAG 2.1 AA / Section 508 and emits a stored, queryable compliance record per commit.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install -g theaccessible-audit-ci
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Authenticate
|
|
12
|
+
|
|
13
|
+
Create an audit API key at <https://theaccessible.org/settings/api-keys>, scope it to a single GitHub-style `owner/repo`, then set:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
export THEACCESSIBLE_API_KEY="tac_…"
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
The CLI **never** logs the raw key — it's masked in all stderr output (`tac_xy…wxyz`). Override the API host with `THEACCESSIBLE_API_URL` if you're using a self-hosted deployment.
|
|
20
|
+
|
|
21
|
+
## Configure
|
|
22
|
+
|
|
23
|
+
Drop a `.theaccessible.yml` at your repo root:
|
|
24
|
+
|
|
25
|
+
```yaml
|
|
26
|
+
version: 1
|
|
27
|
+
targets:
|
|
28
|
+
- name: marketing-site
|
|
29
|
+
type: url
|
|
30
|
+
url: https://staging.theaccessible.org
|
|
31
|
+
- name: pdf-app-build
|
|
32
|
+
type: build-artifact
|
|
33
|
+
path: ./apps/web/dist
|
|
34
|
+
gate:
|
|
35
|
+
mode: advisory # advisory | blocking
|
|
36
|
+
min_grade: B # A | B | C | D | F
|
|
37
|
+
fail_on: [] # new_critical_issues | grade_regression | min_grade_violation
|
|
38
|
+
notifications:
|
|
39
|
+
email:
|
|
40
|
+
digest: weekly
|
|
41
|
+
recipients: [a11y@example.com]
|
|
42
|
+
timeout_seconds: 300
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
**`gate.mode` defaults to `advisory` on first install.** Advisory mode prints warnings and exits 0, so installing the CLI never breaks a green build. Switch to `blocking` only after your team has reviewed a few runs and agreed on which gates to enforce.
|
|
46
|
+
|
|
47
|
+
## Commands
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
theaccessible audit [target] # run an audit (default: first target)
|
|
51
|
+
theaccessible diff <base-sha> <head-sha> # show regression delta between two prior runs
|
|
52
|
+
theaccessible report <commit-sha> # fetch stored reports for a commit
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
### `audit` options
|
|
56
|
+
|
|
57
|
+
| Flag | Default | Notes |
|
|
58
|
+
| -------------------------- | ---------------------- | -------------------------------------------------- |
|
|
59
|
+
| `--config <path>` | `.theaccessible.yml` | Path to config |
|
|
60
|
+
| `--output-dir <dir>` | `./.theaccessible` | Writes `audit-report.json` + `.sarif` |
|
|
61
|
+
| `--timeout-seconds <n>` | `gate.timeout_seconds` | Sync poll budget; falls back to async beyond this |
|
|
62
|
+
| `--fail-on <reasons>` | `gate.fail_on` | Comma-separated override |
|
|
63
|
+
|
|
64
|
+
## Exit codes
|
|
65
|
+
|
|
66
|
+
| Code | Meaning |
|
|
67
|
+
| ---- | ----------------------------------------------------------------------------------------------- |
|
|
68
|
+
| `0` | Pass (including advisory-mode "would-have-failed" cases) |
|
|
69
|
+
| `1` | Accessibility gate failed in `blocking` mode |
|
|
70
|
+
| `2` | Tool / network / auth error (treat differently from `1` in CI — don't blame the code) |
|
|
71
|
+
|
|
72
|
+
Distinguish these in your CI step:
|
|
73
|
+
|
|
74
|
+
```yaml
|
|
75
|
+
- run: theaccessible audit
|
|
76
|
+
continue-on-error: ${{ inputs.gate-mode == 'advisory' }}
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
## Outputs
|
|
80
|
+
|
|
81
|
+
After every `audit` run you get three artifacts:
|
|
82
|
+
|
|
83
|
+
- **stdout** — human-readable grade + top regressions.
|
|
84
|
+
- **`./.theaccessible/audit-report.json`** — full machine-readable result.
|
|
85
|
+
- **`./.theaccessible/audit-report.sarif`** — SARIF 2.1.0 for GitHub code-scanning annotations.
|
|
86
|
+
|
|
87
|
+
## Git context auto-detection
|
|
88
|
+
|
|
89
|
+
The CLI pulls `commit_sha`, `branch`, `repo`, and `actor` from CI env vars (`GITHUB_SHA`, `GITHUB_REF_NAME`, `GITHUB_REPOSITORY`, `GITHUB_ACTOR`, and the equivalent GitLab `CI_*` vars), falling back to `git rev-parse` locally. The audit endpoint is **idempotent on `(repo, commit_sha, target.name)`** — re-running the same commit returns the existing job and never re-bills.
|
|
90
|
+
|
|
91
|
+
## Generic CI recipes
|
|
92
|
+
|
|
93
|
+
See `docs/ci-integration.md` in the project's source repository for GitLab CI, CircleCI, Jenkins, and Bitbucket Pipelines snippets. (The file is not shipped inside the npm package, so a relative link would not resolve from here.)
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
export interface AuditRequest {
|
|
2
|
+
repo: string;
|
|
3
|
+
commit_sha: string;
|
|
4
|
+
branch?: string;
|
|
5
|
+
pr_number?: number;
|
|
6
|
+
actor?: string;
|
|
7
|
+
target: {
|
|
8
|
+
name: string;
|
|
9
|
+
type: 'url' | 'vpat' | 'build-artifact';
|
|
10
|
+
url?: string;
|
|
11
|
+
path?: string;
|
|
12
|
+
vpat?: unknown;
|
|
13
|
+
/** Inline HTML payload for single-page build-artifact (legacy). */
|
|
14
|
+
html?: string;
|
|
15
|
+
/** S3 key of an uploaded tar.gz build artifact (multipage). */
|
|
16
|
+
artifact_key?: string;
|
|
17
|
+
/** Entry HTML paths inside the artifact, relative to its root. */
|
|
18
|
+
entry_pages?: string[];
|
|
19
|
+
};
|
|
20
|
+
wcag_level?: 'A' | 'AA';
|
|
21
|
+
}
|
|
22
|
+
export interface AuditJobResponse {
|
|
23
|
+
status: 'queued' | 'running' | 'complete' | 'failed';
|
|
24
|
+
grade: 'A' | 'B' | 'C' | 'D' | 'F' | null;
|
|
25
|
+
score: number | null;
|
|
26
|
+
percentage: number | null;
|
|
27
|
+
result: unknown | null;
|
|
28
|
+
sarif: unknown | null;
|
|
29
|
+
error: string | null;
|
|
30
|
+
started_at: string;
|
|
31
|
+
finished_at: string | null;
|
|
32
|
+
}
|
|
33
|
+
export interface PostAuditResponse {
|
|
34
|
+
job_id: string;
|
|
35
|
+
status: 'queued' | 'running' | 'complete';
|
|
36
|
+
idempotent: boolean;
|
|
37
|
+
}
|
|
38
|
+
export interface PresignResponse {
|
|
39
|
+
artifact_key: string;
|
|
40
|
+
upload_url: string;
|
|
41
|
+
headers: Record<string, string>;
|
|
42
|
+
expires_in: number;
|
|
43
|
+
}
|
|
44
|
+
export declare class ApiError extends Error {
|
|
45
|
+
readonly status: number;
|
|
46
|
+
readonly body?: unknown | undefined;
|
|
47
|
+
constructor(message: string, status: number, body?: unknown | undefined);
|
|
48
|
+
}
|
|
49
|
+
export declare class ApiClient {
|
|
50
|
+
private readonly baseUrl;
|
|
51
|
+
private readonly apiKey;
|
|
52
|
+
constructor(baseUrl: string, apiKey: string);
|
|
53
|
+
private headers;
|
|
54
|
+
postAudit(req: AuditRequest): Promise<PostAuditResponse>;
|
|
55
|
+
presignArtifact(commitSha: string, targetName: string): Promise<PresignResponse>;
|
|
56
|
+
uploadArtifact(presign: PresignResponse, body: Buffer | Uint8Array): Promise<void>;
|
|
57
|
+
getJob(jobId: string): Promise<AuditJobResponse>;
|
|
58
|
+
getReport(id: string): Promise<unknown>;
|
|
59
|
+
listReports(repo: string, commitSha: string): Promise<unknown[]>;
|
|
60
|
+
}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
/** Error raised for any non-OK HTTP response from the TheAccessible API. */
export class ApiError extends Error {
    /** HTTP status code of the failed response. */
    status;
    /** Decoded response body, when one could be parsed (may be undefined). */
    body;
    constructor(message, status, body) {
        super(message);
        this.name = 'ApiError';
        this.status = status;
        this.body = body;
    }
}
|
|
11
|
+
/**
 * Thin HTTP client for the TheAccessible audit API.
 *
 * Every method throws ApiError on a non-OK response, and on a response whose
 * JSON envelope is missing the expected `data` field — previously getJob,
 * getReport, and listReports skipped that check, so a malformed response
 * surfaced as `undefined` (or a raw TypeError in listReports) instead of a
 * diagnosable ApiError.
 */
export class ApiClient {
    baseUrl;
    apiKey;
    constructor(baseUrl, apiKey) {
        this.baseUrl = baseUrl;
        this.apiKey = apiKey;
    }
    /** Common headers for every API request (bearer auth + JSON body). */
    headers() {
        return {
            'Authorization': `Bearer ${this.apiKey}`,
            'Content-Type': 'application/json',
            'User-Agent': 'theaccessible-audit-ci',
        };
    }
    /** Enqueue (or reuse, if idempotent) an audit run for a commit/target. */
    async postAudit(req) {
        const res = await fetch(`${this.baseUrl}/api/audit`, {
            method: 'POST',
            headers: this.headers(),
            body: JSON.stringify(req),
        });
        // Note: 202 Accepted is within res.ok (2xx), so the old extra
        // `&& res.status !== 202` clause was dead code.
        if (!res.ok) {
            const body = await safeJson(res);
            throw new ApiError(`POST /api/audit failed (${res.status})`, res.status, body);
        }
        const json = (await res.json());
        if (!json?.data)
            throw new ApiError('Malformed POST /api/audit response', res.status, json);
        return json.data;
    }
    /** Request a presigned upload slot for a build-artifact tarball. */
    async presignArtifact(commitSha, targetName) {
        const res = await fetch(`${this.baseUrl}/api/audit/artifacts/presign`, {
            method: 'POST',
            headers: this.headers(),
            body: JSON.stringify({ commit_sha: commitSha, target_name: targetName }),
        });
        if (!res.ok) {
            throw new ApiError(`Presign failed (${res.status})`, res.status, await safeJson(res));
        }
        const json = (await res.json());
        if (!json?.data)
            throw new ApiError('Malformed presign response', res.status, json);
        return json.data;
    }
    /** PUT the tarball straight to the presigned URL (no API auth headers). */
    async uploadArtifact(presign, body) {
        const res = await fetch(presign.upload_url, {
            method: 'PUT',
            headers: presign.headers,
            // Node's global fetch accepts Buffer/Uint8Array for body; the DOM
            // BodyInit lib type isn't available without dom libs in tsconfig.
            body: body,
        });
        if (!res.ok) {
            throw new ApiError(`Artifact upload failed (${res.status})`, res.status);
        }
    }
    /** Fetch the current state of a queued/running/finished audit job. */
    async getJob(jobId) {
        const res = await fetch(`${this.baseUrl}/api/audit/jobs/${jobId}`, {
            method: 'GET',
            headers: this.headers(),
        });
        if (!res.ok) {
            throw new ApiError(`GET /api/audit/jobs/${jobId} failed (${res.status})`, res.status, await safeJson(res));
        }
        const json = (await res.json());
        if (!json?.data)
            throw new ApiError(`Malformed job response for ${jobId}`, res.status, json);
        return json.data;
    }
    /** Fetch a single stored report by id. */
    async getReport(id) {
        const res = await fetch(`${this.baseUrl}/api/audit/reports/${id}`, {
            method: 'GET',
            headers: this.headers(),
        });
        if (!res.ok)
            throw new ApiError(`GET /api/audit/reports/${id} failed`, res.status, await safeJson(res));
        const json = (await res.json());
        if (!json?.data)
            throw new ApiError(`Malformed report response for ${id}`, res.status, json);
        return json.data;
    }
    /** List stored reports for a (repo, commit) pair. */
    async listReports(repo, commitSha) {
        const url = new URL(`${this.baseUrl}/api/audit/reports`);
        url.searchParams.set('repo', repo);
        url.searchParams.set('commit_sha', commitSha);
        const res = await fetch(url, { headers: this.headers() });
        if (!res.ok)
            throw new ApiError('GET /api/audit/reports failed', res.status, await safeJson(res));
        const json = (await res.json());
        // Guard both levels: json.data.reports on a malformed envelope used to
        // throw a bare TypeError instead of an ApiError.
        if (!Array.isArray(json?.data?.reports))
            throw new ApiError('Malformed /api/audit/reports response', res.status, json);
        return json.data.reports;
    }
}
|
|
98
|
+
/**
 * Best-effort JSON decode of a response body: returns the parsed value, or
 * undefined when the body is empty/not JSON, instead of throwing.
 */
async function safeJson(res) {
    let parsed;
    try {
        parsed = await res.json();
    }
    catch {
        parsed = undefined;
    }
    return parsed;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import type { ApiClient } from './api-client';
|
|
2
|
+
/** Hard cap on pages per multipage audit; matches server schema. */
|
|
3
|
+
export declare const MAX_ARTIFACT_PAGES = 25;
|
|
4
|
+
export interface MultipageArtifact {
|
|
5
|
+
mode: 'multipage';
|
|
6
|
+
artifactKey: string;
|
|
7
|
+
entryPages: string[];
|
|
8
|
+
}
|
|
9
|
+
export interface SinglePageArtifact {
|
|
10
|
+
mode: 'single';
|
|
11
|
+
html: string;
|
|
12
|
+
}
|
|
13
|
+
export type PreparedArtifact = MultipageArtifact | SinglePageArtifact;
|
|
14
|
+
/**
|
|
15
|
+
* Recursively list HTML files under `root`, returning paths relative to it.
|
|
16
|
+
*
|
|
17
|
+
* Ordering rules:
|
|
18
|
+
* - `index.html` (top level) sorts first so it's always the first page
|
|
19
|
+
* audited — common convention for entry points.
|
|
20
|
+
* - Remaining files sort lexicographically so re-runs are idempotent.
|
|
21
|
+
*
|
|
22
|
+
* Skipped:
|
|
23
|
+
* - Dot-prefixed entries (.git, .next, .cache) — never relevant for an audit
|
|
24
|
+
* - `node_modules` — heuristic to avoid auditing transitive HTML inside deps
|
|
25
|
+
*/
|
|
26
|
+
export declare function listHtmlFiles(root: string): string[];
|
|
27
|
+
/**
|
|
28
|
+
* Resolve a build-artifact target to either a multipage upload bundle or a
|
|
29
|
+
* single-page inline-HTML payload, based on whether `path` is a directory.
|
|
30
|
+
*
|
|
31
|
+
* Multipage path: tars the directory (system `tar`), presigns + uploads to S3
|
|
32
|
+
* via the API, returns the artifact key + entry-page list. The server then
|
|
33
|
+
* enqueues one audit job per page (file:// rendering).
|
|
34
|
+
*
|
|
35
|
+
* Single-page path: reads the HTML file directly. Kept for the case where a
|
|
36
|
+
* caller has only one HTML to audit and wants to skip the S3 round-trip;
|
|
37
|
+
* loses external-CSS rendering, hence the stderr warning.
|
|
38
|
+
*/
|
|
39
|
+
export declare function prepareBuildArtifact(path: string | undefined, targetName: string, commitSha: string, client: ApiClient, writeStderr?: (msg: string) => void): Promise<PreparedArtifact>;
|
package/dist/artifact.js
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { readFileSync, readdirSync, statSync } from 'node:fs';
|
|
2
|
+
import { execFileSync } from 'node:child_process';
|
|
3
|
+
import { join, relative, resolve } from 'node:path';
|
|
4
|
+
/** Hard cap on pages per multipage audit; matches server schema. */
|
|
5
|
+
export const MAX_ARTIFACT_PAGES = 25;
|
|
6
|
+
/**
|
|
7
|
+
* Recursively list HTML files under `root`, returning paths relative to it.
|
|
8
|
+
*
|
|
9
|
+
* Ordering rules:
|
|
10
|
+
* - `index.html` (top level) sorts first so it's always the first page
|
|
11
|
+
* audited — common convention for entry points.
|
|
12
|
+
* - Remaining files sort lexicographically so re-runs are idempotent.
|
|
13
|
+
*
|
|
14
|
+
* Skipped:
|
|
15
|
+
* - Dot-prefixed entries (.git, .next, .cache) — never relevant for an audit
|
|
16
|
+
* - `node_modules` — heuristic to avoid auditing transitive HTML inside deps
|
|
17
|
+
*/
|
|
18
|
+
export function listHtmlFiles(root) {
    // Iterative depth-first scan; a recursion-free stack keeps very deep
    // trees safe. Dot-entries and node_modules are pruned at every level.
    const found = [];
    const pending = [root];
    while (pending.length > 0) {
        const dir = pending.pop();
        for (const item of readdirSync(dir, { withFileTypes: true })) {
            if (item.name.startsWith('.') || item.name === 'node_modules') {
                continue;
            }
            const absPath = join(dir, item.name);
            if (item.isDirectory()) {
                pending.push(absPath);
            }
            else if (item.isFile() && /\.html?$/i.test(item.name)) {
                found.push(relative(root, absPath));
            }
        }
    }
    // Top-level index.html always sorts first; everything else is
    // lexicographic so repeated runs produce identical ordering.
    return found.sort((a, b) => {
        if (a === 'index.html') {
            return -1;
        }
        if (b === 'index.html') {
            return 1;
        }
        return a.localeCompare(b);
    });
}
|
|
41
|
+
/**
|
|
42
|
+
* Resolve a build-artifact target to either a multipage upload bundle or a
|
|
43
|
+
* single-page inline-HTML payload, based on whether `path` is a directory.
|
|
44
|
+
*
|
|
45
|
+
* Multipage path: tars the directory (system `tar`), presigns + uploads to S3
|
|
46
|
+
* via the API, returns the artifact key + entry-page list. The server then
|
|
47
|
+
* enqueues one audit job per page (file:// rendering).
|
|
48
|
+
*
|
|
49
|
+
* Single-page path: reads the HTML file directly. Kept for the case where a
|
|
50
|
+
* caller has only one HTML to audit and wants to skip the S3 round-trip;
|
|
51
|
+
* loses external-CSS rendering, hence the stderr warning.
|
|
52
|
+
*/
|
|
53
|
+
export async function prepareBuildArtifact(path, targetName, commitSha, client, writeStderr = (m) => process.stderr.write(m)) {
    if (!path) {
        throw new Error('build-artifact target requires a `path` (HTML file or directory)');
    }
    const abs = resolve(process.cwd(), path);
    // statSync throws (e.g. ENOENT) for a missing path; callers surface that
    // as a tool error rather than an audit failure.
    const stat = statSync(abs);
    if (!stat.isDirectory()) {
        // Single-file path: inline the HTML, no tar/upload round-trip.
        writeStderr(' ℹ Single-page mode (CSS-derived axe rules may be skipped). Point `path` at a directory for full multipage + asset support.\n');
        return { mode: 'single', html: readFileSync(abs, 'utf8') };
    }
    // Cap at MAX_ARTIFACT_PAGES; listHtmlFiles puts top-level index.html
    // first, so the entry point survives the truncation.
    const htmls = listHtmlFiles(abs).slice(0, MAX_ARTIFACT_PAGES);
    if (htmls.length === 0) {
        throw new Error(`No HTML files found under ${abs}`);
    }
    writeStderr(` → Tarring ${htmls.length} page(s) from ${abs}\n`);
    // -C chdirs so the tar's root matches what we send as entry_pages.
    // Uses the system `tar` binary; stdout ('-') is captured into a Buffer.
    const tarBuf = execFileSync('tar', ['-czf', '-', '-C', abs, '.'], {
        maxBuffer: 256 * 1024 * 1024,
    });
    writeStderr(` → Uploading ${tarBuf.length} bytes\n`);
    // Presign + PUT the tarball; the returned key ties the upload to the
    // subsequent postAudit call.
    const presign = await client.presignArtifact(commitSha, targetName);
    await client.uploadArtifact(presign, tarBuf);
    return {
        mode: 'multipage',
        artifactKey: presign.artifact_key,
        entryPages: htmls,
    };
}
|
package/dist/cli.d.ts
ADDED
package/dist/cli.js
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { Command } from 'commander';
|
|
3
|
+
import { readFileSync } from 'node:fs';
|
|
4
|
+
import { resolve } from 'node:path';
|
|
5
|
+
import { prepareBuildArtifact } from './artifact';
|
|
6
|
+
import { ApiClient, ApiError } from './api-client';
|
|
7
|
+
import { loadConfig, ConfigError } from './config';
|
|
8
|
+
import { decideExitCode, ExitCode } from './exit-codes';
|
|
9
|
+
import { humanSummary, writeOutputs } from './output';
|
|
10
|
+
import { pollUntilComplete } from './poll';
|
|
11
|
+
import { detectGitContext } from './git';
|
|
12
|
+
const program = new Command();
|
|
13
|
+
/**
 * Read and trim THEACCESSIBLE_API_KEY; a missing/blank key is a tool error
 * (exit 2), never an audit failure.
 */
function getApiKey() {
    const raw = process.env.THEACCESSIBLE_API_KEY;
    const trimmed = raw?.trim();
    if (!trimmed) {
        process.stderr.write('ERROR: THEACCESSIBLE_API_KEY env var is not set.\n');
        process.exit(ExitCode.TOOL_ERROR);
    }
    return trimmed;
}
|
|
21
|
+
/** API host, overridable via THEACCESSIBLE_API_URL for self-hosted setups. */
function getBaseUrl() {
    const override = process.env.THEACCESSIBLE_API_URL;
    return override ?? 'https://api.theaccessible.org';
}
|
|
24
|
+
/**
 * Mask an API key for stderr logging: first 6 + last 4 characters.
 *
 * Keys shorter than 12 characters are fully masked. The old cutoff of 10
 * meant a 10-char key was printed in its entirety (6 + 4 = 10 characters
 * revealed), defeating the masking.
 */
function maskKey(k) {
    if (k.length < 12)
        return '****';
    return `${k.slice(0, 6)}…${k.slice(-4)}`;
}
|
|
29
|
+
program
    .name('theaccessible')
    .description('TheAccessible CI/CD audit CLI')
    // Keep in sync with package.json — the published package is 0.2.0 but
    // `--version` previously reported 0.1.0.
    .version('0.2.0');
|
|
33
|
+
// `theaccessible audit [target]` — run one audit, write artifacts, apply the
// gate, and exit 0 / 1 / 2 per the README contract.
program
    .command('audit')
    .argument('[target]', 'Target name from .theaccessible.yml (default: first target)')
    .option('-c, --config <path>', 'Path to .theaccessible.yml', '.theaccessible.yml')
    .option('-o, --output-dir <dir>', 'Directory for audit-report.json + .sarif', './.theaccessible')
    .option('--timeout-seconds <n>', 'Override config timeout', (v) => parseInt(v, 10))
    .option('--fail-on <reasons>', 'Override gate.fail_on (comma-separated)')
    .action(async (targetArg, opts) => {
    let exit = ExitCode.PASS;
    try {
        const cfg = loadConfig(opts.config);
        // No positional arg → audit the first configured target.
        const target = targetArg
            ? cfg.targets.find((t) => t.name === targetArg)
            : cfg.targets[0];
        if (!target) {
            process.stderr.write(`ERROR: target "${targetArg}" not found in ${opts.config}\n`);
            process.exit(ExitCode.TOOL_ERROR);
        }
        const apiKey = getApiKey();
        const client = new ApiClient(getBaseUrl(), apiKey);
        const ctx = detectGitContext();
        if (!ctx.commitSha || !ctx.repo) {
            process.stderr.write('ERROR: Could not detect commit_sha or repo. Set GITHUB_SHA + GITHUB_REPOSITORY or run inside a git repo.\n');
            process.exit(ExitCode.TOOL_ERROR);
        }
        // The key only ever appears masked in stderr output.
        process.stderr.write(`→ Auditing ${target.name} (${target.type}) — key ${maskKey(apiKey)}\n`);
        const vpat = target.type === 'vpat' && target.path ? readJsonFile(target.path) : undefined;
        // For build-artifact targets, two shapes are supported:
        //  - `path` points at a directory → tar it, upload, audit every
        //    HTML inside (multipage; CSS / images load via file://)
        //  - `path` points at a single .html file → legacy inline-HTML path
        let html;
        let artifactKey;
        let entryPages;
        if (target.type === 'build-artifact') {
            const artifact = await prepareBuildArtifact(target.path, target.name, ctx.commitSha, client);
            if (artifact.mode === 'multipage') {
                artifactKey = artifact.artifactKey;
                entryPages = artifact.entryPages;
            }
            else {
                html = artifact.html;
            }
        }
        const post = await client.postAudit({
            repo: ctx.repo,
            commit_sha: ctx.commitSha,
            branch: ctx.branch,
            actor: ctx.actor,
            ...(ctx.prNumber ? { pr_number: ctx.prNumber } : {}),
            target: {
                name: target.name,
                type: target.type,
                url: target.url,
                path: target.path,
                vpat,
                html,
                ...(artifactKey ? { artifact_key: artifactKey } : {}),
                ...(entryPages ? { entry_pages: entryPages } : {}),
            },
        });
        if (post.idempotent)
            process.stderr.write(` ↻ Existing run reused (${post.job_id})\n`);
        // CLI flag beats config for the sync-poll budget.
        const timeout = opts.timeoutSeconds ?? cfg.timeout_seconds;
        const job = await pollUntilComplete(client, post.job_id, { timeoutSeconds: timeout });
        // Artifacts are written even for failed runs so CI can upload them.
        const outputs = writeOutputs(job, opts.outputDir);
        process.stdout.write(`${humanSummary(job)}\n`);
        process.stderr.write(`✓ Wrote ${outputs.jsonPath}\n`);
        process.stderr.write(`✓ Wrote ${outputs.sarifPath}\n`);
        // Credit exhaustion is an explicit pass-through: the design contract is
        // "never fail a deploy because the org ran out of audit credits". The
        // server marks the run failed with error=INSUFFICIENT_CREDITS so the
        // dashboard can show it, but the CLI exits clean.
        if (job.status === 'failed' && job.error === 'INSUFFICIENT_CREDITS') {
            process.stderr.write('⚠ Audit skipped: organization is out of CI credits. Deploy not blocked.\n');
            process.exit(ExitCode.PASS);
        }
        // Any other failure (or a complete job missing a grade) is exit 2.
        if (job.status === 'failed' || !job.grade) {
            process.exit(ExitCode.TOOL_ERROR);
        }
        const failOnOverride = opts.failOn
            ? opts.failOn.split(',').map((s) => s.trim()).filter(Boolean)
            : cfg.gate.fail_on;
        const result = job.result;
        const newCriticalIssues = result?.failingCriteria?.filter((c) => c.conformance === 'Does Not Support').length ?? 0;
        // Resolve previous grade from base SHA so grade_regression gates can fire.
        // Best-effort: a network error or missing prior run shouldn't fail the
        // CLI — regression detection just skips silently in that case.
        let previousGrade = null;
        if (ctx.baseSha && failOnOverride.includes('grade_regression')) {
            try {
                const baseReports = (await client.listReports(ctx.repo, ctx.baseSha));
                const baseTarget = baseReports.find((r) => r.target_name === target.name);
                previousGrade = baseTarget?.grade ?? null;
            }
            catch (err) {
                process.stderr.write(`⚠ Could not fetch base report for ${ctx.baseSha}: ${err instanceof Error ? err.message : String(err)}\n`);
            }
        }
        const decision = decideExitCode({ ...cfg.gate, fail_on: failOnOverride }, {
            grade: job.grade,
            newCriticalIssues,
            previousGrade,
        });
        // Blocking mode reports failures; advisory mode reports the same
        // reasons as warnings with a clean exit.
        if (decision.code !== ExitCode.PASS) {
            process.stderr.write(`✗ Gate failed: ${decision.reasons.join('; ')}\n`);
        }
        else if (decision.reasons.length > 0) {
            process.stderr.write(`⚠ Advisory: ${decision.reasons.join('; ')}\n`);
        }
        exit = decision.code;
    }
    catch (err) {
        handleError(err);
        exit = ExitCode.TOOL_ERROR;
    }
    process.exit(exit);
});
|
|
151
|
+
// `theaccessible diff <base> <head>` — compare two previously stored runs;
// never triggers a new audit.
program
    .command('diff')
    .argument('<base-sha>', 'Base commit SHA')
    .argument('<head-sha>', 'Head commit SHA')
    .option('-c, --config <path>', 'Path to .theaccessible.yml', '.theaccessible.yml')
    .action(async (baseSha, headSha, opts) => {
    try {
        const cfg = loadConfig(opts.config);
        const client = new ApiClient(getBaseUrl(), getApiKey());
        const ctx = detectGitContext();
        if (!ctx.repo)
            throw new Error('Could not detect repo (set GITHUB_REPOSITORY)');
        // Independent lookups — fetch both sides in parallel.
        const [baseReports, headReports] = await Promise.all([
            client.listReports(ctx.repo, baseSha),
            client.listReports(ctx.repo, headSha),
        ]);
        process.stdout.write(formatDiff(cfg.targets, baseReports, headReports) + '\n');
    }
    catch (err) {
        handleError(err);
        process.exit(ExitCode.TOOL_ERROR);
    }
});
|
|
174
|
+
// `theaccessible report <sha>` — dump stored reports for a commit as JSON on
// stdout (machine-consumable; all status goes to stderr).
program
    .command('report')
    .argument('<commit-sha>')
    .action(async (commitSha) => {
    try {
        const client = new ApiClient(getBaseUrl(), getApiKey());
        const ctx = detectGitContext();
        if (!ctx.repo)
            throw new Error('Could not detect repo');
        const reports = await client.listReports(ctx.repo, commitSha);
        process.stdout.write(JSON.stringify(reports, null, 2) + '\n');
    }
    catch (err) {
        handleError(err);
        process.exit(ExitCode.TOOL_ERROR);
    }
});
|
|
191
|
+
/** Read and parse a JSON file, resolving relative paths against CWD. */
function readJsonFile(path) {
    const absolute = resolve(process.cwd(), path);
    const text = readFileSync(absolute, 'utf8');
    return JSON.parse(text);
}
|
|
194
|
+
// Render a fixed-width table of base vs head grades per configured target.
function formatDiff(targets, base, head) {
    // Index reports by target name for O(1) base/head pairing.
    const byName = (rs) => new Map(rs.map((r) => [
        r.target_name,
        r,
    ]));
    const b = byName(base);
    const h = byName(head);
    // NOTE(review): rows below pad to 20/7/7 columns but this header literal
    // has single spaces — looks misaligned; confirm the intended header
    // spacing wasn't collapsed somewhere.
    const lines = ['Target Base Head Δ%'];
    for (const t of targets) {
        const br = b.get(t.name);
        const hr = h.get(t.name);
        // '—' when either side has no numeric percentage (e.g. no stored run).
        const delta = hr?.percentage != null && br?.percentage != null ? (hr.percentage - br.percentage).toFixed(1) : '—';
        lines.push(`${t.name.padEnd(20)}${(br?.grade ?? '—').padEnd(7)}${(hr?.grade ?? '—').padEnd(7)}${delta}`);
    }
    return lines.join('\n');
}
|
|
210
|
+
/**
 * Print any thrown value to stderr in a consistent `ERROR: …` shape.
 * Specific classes first (both extend Error), then the generic fallbacks.
 */
function handleError(err) {
    if (err instanceof ConfigError) {
        process.stderr.write(`ERROR: ${err.message}\n`);
        if (err.details) {
            process.stderr.write(JSON.stringify(err.details, null, 2) + '\n');
        }
        return;
    }
    if (err instanceof ApiError) {
        process.stderr.write(`ERROR: ${err.message} (HTTP ${err.status})\n`);
        if (err.body) {
            process.stderr.write(JSON.stringify(err.body, null, 2) + '\n');
        }
        return;
    }
    const message = err instanceof Error ? err.message : String(err);
    process.stderr.write(`ERROR: ${message}\n`);
}
|
|
228
|
+
program.parseAsync(process.argv);
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
export declare const ConfigSchema: z.ZodObject<{
|
|
3
|
+
version: z.ZodLiteral<1>;
|
|
4
|
+
targets: z.ZodArray<z.ZodObject<{
|
|
5
|
+
name: z.ZodString;
|
|
6
|
+
type: z.ZodEnum<["url", "vpat", "build-artifact"]>;
|
|
7
|
+
url: z.ZodOptional<z.ZodString>;
|
|
8
|
+
path: z.ZodOptional<z.ZodString>;
|
|
9
|
+
}, "strip", z.ZodTypeAny, {
|
|
10
|
+
type: "url" | "vpat" | "build-artifact";
|
|
11
|
+
name: string;
|
|
12
|
+
url?: string | undefined;
|
|
13
|
+
path?: string | undefined;
|
|
14
|
+
}, {
|
|
15
|
+
type: "url" | "vpat" | "build-artifact";
|
|
16
|
+
name: string;
|
|
17
|
+
url?: string | undefined;
|
|
18
|
+
path?: string | undefined;
|
|
19
|
+
}>, "many">;
|
|
20
|
+
gate: z.ZodDefault<z.ZodObject<{
|
|
21
|
+
mode: z.ZodDefault<z.ZodEnum<["advisory", "blocking"]>>;
|
|
22
|
+
min_grade: z.ZodDefault<z.ZodEnum<["A", "B", "C", "D", "F"]>>;
|
|
23
|
+
fail_on: z.ZodDefault<z.ZodArray<z.ZodEnum<["new_critical_issues", "grade_regression", "min_grade_violation"]>, "many">>;
|
|
24
|
+
allow_regressions_until: z.ZodOptional<z.ZodString>;
|
|
25
|
+
}, "strip", z.ZodTypeAny, {
|
|
26
|
+
mode: "advisory" | "blocking";
|
|
27
|
+
min_grade: "A" | "B" | "C" | "D" | "F";
|
|
28
|
+
fail_on: ("new_critical_issues" | "grade_regression" | "min_grade_violation")[];
|
|
29
|
+
allow_regressions_until?: string | undefined;
|
|
30
|
+
}, {
|
|
31
|
+
mode?: "advisory" | "blocking" | undefined;
|
|
32
|
+
min_grade?: "A" | "B" | "C" | "D" | "F" | undefined;
|
|
33
|
+
fail_on?: ("new_critical_issues" | "grade_regression" | "min_grade_violation")[] | undefined;
|
|
34
|
+
allow_regressions_until?: string | undefined;
|
|
35
|
+
}>>;
|
|
36
|
+
notifications: z.ZodOptional<z.ZodObject<{
|
|
37
|
+
email: z.ZodOptional<z.ZodObject<{
|
|
38
|
+
digest: z.ZodDefault<z.ZodEnum<["off", "daily", "weekly"]>>;
|
|
39
|
+
recipients: z.ZodDefault<z.ZodArray<z.ZodString, "many">>;
|
|
40
|
+
}, "strip", z.ZodTypeAny, {
|
|
41
|
+
digest: "off" | "daily" | "weekly";
|
|
42
|
+
recipients: string[];
|
|
43
|
+
}, {
|
|
44
|
+
digest?: "off" | "daily" | "weekly" | undefined;
|
|
45
|
+
recipients?: string[] | undefined;
|
|
46
|
+
}>>;
|
|
47
|
+
}, "strip", z.ZodTypeAny, {
|
|
48
|
+
email?: {
|
|
49
|
+
digest: "off" | "daily" | "weekly";
|
|
50
|
+
recipients: string[];
|
|
51
|
+
} | undefined;
|
|
52
|
+
}, {
|
|
53
|
+
email?: {
|
|
54
|
+
digest?: "off" | "daily" | "weekly" | undefined;
|
|
55
|
+
recipients?: string[] | undefined;
|
|
56
|
+
} | undefined;
|
|
57
|
+
}>>;
|
|
58
|
+
timeout_seconds: z.ZodDefault<z.ZodNumber>;
|
|
59
|
+
}, "strip", z.ZodTypeAny, {
|
|
60
|
+
version: 1;
|
|
61
|
+
targets: {
|
|
62
|
+
type: "url" | "vpat" | "build-artifact";
|
|
63
|
+
name: string;
|
|
64
|
+
url?: string | undefined;
|
|
65
|
+
path?: string | undefined;
|
|
66
|
+
}[];
|
|
67
|
+
gate: {
|
|
68
|
+
mode: "advisory" | "blocking";
|
|
69
|
+
min_grade: "A" | "B" | "C" | "D" | "F";
|
|
70
|
+
fail_on: ("new_critical_issues" | "grade_regression" | "min_grade_violation")[];
|
|
71
|
+
allow_regressions_until?: string | undefined;
|
|
72
|
+
};
|
|
73
|
+
timeout_seconds: number;
|
|
74
|
+
notifications?: {
|
|
75
|
+
email?: {
|
|
76
|
+
digest: "off" | "daily" | "weekly";
|
|
77
|
+
recipients: string[];
|
|
78
|
+
} | undefined;
|
|
79
|
+
} | undefined;
|
|
80
|
+
}, {
|
|
81
|
+
version: 1;
|
|
82
|
+
targets: {
|
|
83
|
+
type: "url" | "vpat" | "build-artifact";
|
|
84
|
+
name: string;
|
|
85
|
+
url?: string | undefined;
|
|
86
|
+
path?: string | undefined;
|
|
87
|
+
}[];
|
|
88
|
+
gate?: {
|
|
89
|
+
mode?: "advisory" | "blocking" | undefined;
|
|
90
|
+
min_grade?: "A" | "B" | "C" | "D" | "F" | undefined;
|
|
91
|
+
fail_on?: ("new_critical_issues" | "grade_regression" | "min_grade_violation")[] | undefined;
|
|
92
|
+
allow_regressions_until?: string | undefined;
|
|
93
|
+
} | undefined;
|
|
94
|
+
notifications?: {
|
|
95
|
+
email?: {
|
|
96
|
+
digest?: "off" | "daily" | "weekly" | undefined;
|
|
97
|
+
recipients?: string[] | undefined;
|
|
98
|
+
} | undefined;
|
|
99
|
+
} | undefined;
|
|
100
|
+
timeout_seconds?: number | undefined;
|
|
101
|
+
}>;
|
|
102
|
+
/** Validated shape of `.theaccessible.yml`, inferred from `ConfigSchema`. */
export type Config = z.infer<typeof ConfigSchema>;
/**
 * Error thrown for any configuration problem: an unreadable file or a
 * document that fails schema validation. `details` carries structured
 * context (e.g. flattened zod issues) when available.
 */
export declare class ConfigError extends Error {
    readonly details?: unknown | undefined;
    constructor(message: string, details?: unknown | undefined);
}
/**
 * Reads, parses, and schema-validates the YAML config at `path`
 * (resolved against the current working directory).
 */
export declare function loadConfig(path: string): Config;
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
import { readFileSync } from 'node:fs';
|
|
3
|
+
import { resolve } from 'node:path';
|
|
4
|
+
import yaml from 'js-yaml';
|
|
5
|
+
/** Schema for one audit target entry in `.theaccessible.yml`. */
const targetSchema = z.object({
    name: z.string().min(1),
    type: z.enum(['url', 'vpat', 'build-artifact']),
    url: z.string().url().optional(),
    path: z.string().optional(),
});
/** Gate rules controlling pass/fail behavior; advisory by default. */
const gateSchema = z
    .object({
    mode: z.enum(['advisory', 'blocking']).default('advisory'),
    min_grade: z.enum(['A', 'B', 'C', 'D', 'F']).default('B'),
    fail_on: z
        .array(z.enum(['new_critical_issues', 'grade_regression', 'min_grade_violation']))
        .default([]),
    allow_regressions_until: z.string().optional(),
})
    .default({ mode: 'advisory', min_grade: 'B', fail_on: [] });
/** Optional email digest notification settings. */
const notificationsSchema = z
    .object({
    email: z
        .object({
        digest: z.enum(['off', 'daily', 'weekly']).default('off'),
        recipients: z.array(z.string().email()).default([]),
    })
        .optional(),
})
    .optional();
/** Top-level `.theaccessible.yml` schema (config format version 1). */
export const ConfigSchema = z.object({
    version: z.literal(1),
    targets: z.array(targetSchema).min(1),
    gate: gateSchema,
    notifications: notificationsSchema,
    timeout_seconds: z.number().int().positive().default(300),
});
|
|
37
|
+
/**
 * Error raised for any configuration problem: an unreadable file or a
 * document that fails schema validation. `details` carries structured
 * context (e.g. flattened zod issues) when available.
 */
export class ConfigError extends Error {
    constructor(message, details) {
        super(message);
        this.name = 'ConfigError';
        this.details = details;
    }
}
|
|
45
|
+
/**
 * Reads, parses, and schema-validates the YAML config at `path`
 * (resolved against the current working directory).
 *
 * Throws ConfigError for every failure mode — unreadable file, malformed
 * YAML, or schema violation — so callers have a single error type to catch.
 * (Previously a malformed YAML document leaked a raw YAMLException.)
 */
export function loadConfig(path) {
    const abs = resolve(process.cwd(), path);
    // Narrow unknown catch values before reading `.message`.
    const describe = (err) => (err instanceof Error ? err.message : String(err));
    let raw;
    try {
        raw = readFileSync(abs, 'utf8');
    }
    catch (err) {
        throw new ConfigError(`Could not read config at ${abs}: ${describe(err)}`);
    }
    let parsed;
    try {
        parsed = yaml.load(raw);
    }
    catch (err) {
        // js-yaml throws YAMLException on syntax errors; wrap it so the CLI
        // reports config problems uniformly.
        throw new ConfigError(`Could not parse YAML at ${abs}: ${describe(err)}`, err);
    }
    const result = ConfigSchema.safeParse(parsed);
    if (!result.success) {
        throw new ConfigError('Invalid .theaccessible.yml', result.error.flatten());
    }
    return result.data;
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/** Process exit codes emitted by the CLI. */
export declare const ExitCode: {
    readonly PASS: 0;
    readonly GATE_FAILURE: 1;
    readonly TOOL_ERROR: 2;
};
/** Union of the possible exit-code values (0 | 1 | 2). */
export type ExitCodeValue = (typeof ExitCode)[keyof typeof ExitCode];
import type { Config } from './config';
/** Audit outcome fed into the gate decision. */
export interface GateInputs {
    grade: 'A' | 'B' | 'C' | 'D' | 'F';
    newCriticalIssues: number;
    /** Grade of the base commit, when known; null/undefined skips regression checks. */
    previousGrade?: 'A' | 'B' | 'C' | 'D' | 'F' | null;
}
/**
 * Applies the configured gate rules to the audit outcome. Returns the exit
 * code plus human-readable reasons; in `advisory` mode reasons are reported
 * but the code is still PASS.
 */
export declare function decideExitCode(gate: Config['gate'], inputs: GateInputs): {
    code: ExitCodeValue;
    reasons: string[];
};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/** Process exit codes emitted by the CLI. */
export const ExitCode = {
    PASS: 0,
    GATE_FAILURE: 1,
    TOOL_ERROR: 2,
};
// Numeric rank per letter grade; higher is better.
const GRADE_RANK = { A: 5, B: 4, C: 3, D: 2, F: 1 };
/**
 * Applies the configured gate rules to the audit outcome. Every triggered
 * rule contributes a human-readable reason; in `advisory` mode the reasons
 * are reported but the exit code stays PASS.
 */
export function decideExitCode(gate, inputs) {
    const enabled = (rule) => gate.fail_on.includes(rule);
    const reasons = [];
    if (enabled('min_grade_violation') && GRADE_RANK[inputs.grade] < GRADE_RANK[gate.min_grade]) {
        reasons.push(`grade ${inputs.grade} is below min_grade ${gate.min_grade}`);
    }
    if (enabled('new_critical_issues') && inputs.newCriticalIssues > 0) {
        reasons.push(`${inputs.newCriticalIssues} new critical issue(s)`);
    }
    const regressed = inputs.previousGrade &&
        GRADE_RANK[inputs.grade] < GRADE_RANK[inputs.previousGrade];
    if (enabled('grade_regression') && regressed) {
        reasons.push(`grade regressed from ${inputs.previousGrade} to ${inputs.grade}`);
    }
    // Only a blocking gate with at least one triggered rule fails the build.
    const failed = reasons.length > 0 && gate.mode !== 'advisory';
    return { code: failed ? ExitCode.GATE_FAILURE : ExitCode.PASS, reasons };
}
|
package/dist/git.d.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * CI / git metadata attached to each audit record. Every field is
 * best-effort and may be undefined outside a CI environment or git checkout.
 */
export interface GitContext {
    commitSha?: string;
    branch?: string;
    repo?: string;
    actor?: string;
    /**
     * Base commit SHA for regression comparison. On a GitHub PR this is the
     * merge target's SHA at fork time. Falls back to `git merge-base` against
     * the local default branch.
     */
    baseSha?: string;
    prNumber?: number;
}
/** Detects git/CI context from environment variables (GitHub Actions,
 *  GitLab CI) with a fallback to local `git` commands. */
export declare function detectGitContext(): GitContext;
|
package/dist/git.js
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { execFileSync } from 'node:child_process';
|
|
2
|
+
/**
 * Runs `git` with the given args and returns trimmed stdout, or undefined
 * on any failure (git missing, not a repo, unknown ref). stderr is
 * discarded so probing never pollutes CLI output.
 */
function safeGit(args) {
    try {
        const stdout = execFileSync('git', args, { stdio: ['ignore', 'pipe', 'ignore'] });
        return stdout.toString().trim();
    }
    catch {
        return undefined;
    }
}
/**
 * Detects git/CI context from environment variables (GitHub Actions,
 * GitLab CI), falling back to local `git` commands where possible.
 */
export function detectGitContext() {
    const env = process.env;
    const commitSha = env.GITHUB_SHA ?? env.CI_COMMIT_SHA ?? safeGit(['rev-parse', 'HEAD']);
    // Base SHA for regression comparison. GitHub exposes the merge target
    // ref via GITHUB_BASE_REF (e.g. "main"); resolve it against the remote
    // so we get the SHA the base branch had when CI started, not the local
    // working tree. GitLab provides the SHA directly.
    let baseSha;
    if (env.GITHUB_BASE_REF) {
        baseSha = safeGit(['rev-parse', `origin/${env.GITHUB_BASE_REF}`]);
    }
    else if (env.CI_MERGE_REQUEST_DIFF_BASE_SHA) {
        baseSha = env.CI_MERGE_REQUEST_DIFF_BASE_SHA;
    }
    else if (commitSha) {
        // Local / push event — best-effort merge-base against the default remote.
        baseSha =
            safeGit(['merge-base', commitSha, 'origin/HEAD']) ??
                safeGit(['merge-base', commitSha, 'origin/main']);
    }
    const prRaw = env.GITHUB_PR_NUMBER ?? env.CI_MERGE_REQUEST_IID;
    const prNumber = prRaw === undefined ? undefined : Number.parseInt(prRaw, 10);
    const context = {
        commitSha,
        branch: env.GITHUB_HEAD_REF ??
            env.GITHUB_REF_NAME ??
            env.CI_COMMIT_REF_NAME ??
            safeGit(['rev-parse', '--abbrev-ref', 'HEAD']),
        repo: env.GITHUB_REPOSITORY ?? env.CI_PROJECT_PATH,
        actor: env.GITHUB_ACTOR ?? env.GITLAB_USER_LOGIN,
        baseSha,
    };
    // Only attach prNumber when it parsed to a usable (non-zero, finite) value.
    if (prNumber && Number.isFinite(prNumber)) {
        context.prNumber = prNumber;
    }
    return context;
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
/**
 * Public API of theaccessible-audit-ci: HTTP client, config loading,
 * gate/exit-code logic, report output helpers, and job polling.
 */
export { ApiClient, type AuditJobResponse, type AuditRequest } from './api-client';
export { loadConfig, ConfigSchema, type Config } from './config';
export { ExitCode, decideExitCode } from './exit-codes';
export { humanSummary, writeOutputs } from './output';
export { pollUntilComplete } from './poll';
|
package/dist/index.js
ADDED
package/dist/output.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { AuditJobResponse } from './api-client';
/** Absolute paths of the artifact files produced by `writeOutputs`. */
export interface OutputResult {
    jsonPath: string;
    sarifPath: string;
}
/** Writes the job's JSON report and SARIF 2.1.0 log into `outputDir`
 *  (resolved against cwd, created if missing). */
export declare function writeOutputs(job: AuditJobResponse, outputDir: string): OutputResult;
/** Multi-line, human-readable summary of a finished audit job. */
export declare function humanSummary(job: AuditJobResponse): string;
|
package/dist/output.js
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import { mkdirSync, writeFileSync } from 'node:fs';
|
|
2
|
+
import { resolve } from 'node:path';
|
|
3
|
+
/**
 * Writes the audit job's machine-readable artifacts into `outputDir`
 * (resolved against cwd, created if missing): a JSON report and a SARIF
 * 2.1.0 log. Returns the absolute paths of both files.
 */
export function writeOutputs(job, outputDir) {
    const abs = resolve(process.cwd(), outputDir);
    mkdirSync(abs, { recursive: true });
    // Build paths with resolve() rather than string concatenation so the
    // returned paths use the platform's separator consistently.
    const jsonPath = resolve(abs, 'audit-report.json');
    const sarifPath = resolve(abs, 'audit-report.sarif');
    writeFileSync(jsonPath, JSON.stringify(job.result ?? {}, null, 2));
    // Fall back to an empty-but-valid SARIF document so downstream SARIF
    // consumers never receive invalid input when the job carried none.
    writeFileSync(sarifPath, JSON.stringify(job.sarif ?? emptySarif(), null, 2));
    return { jsonPath, sarifPath };
}
/** Minimal valid SARIF 2.1.0 document with zero results. */
function emptySarif() {
    return {
        $schema: 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json',
        version: '2.1.0',
        runs: [{ tool: { driver: { name: 'TheAccessible Audit', version: '1.0.0' } }, results: [] }],
    };
}
|
|
19
|
+
/**
 * Builds a short multi-line, human-readable summary of a finished audit
 * job: failure message, grade line, conformance counts, and up to five
 * failing criteria.
 */
export function humanSummary(job) {
    if (job.status === 'failed') {
        return `✗ Audit failed: ${job.error ?? 'unknown error'}`;
    }
    const result = job.result;
    const out = [`Grade: ${job.grade ?? '—'} (${(job.percentage ?? 0).toFixed(1)}%)`];
    const s = result?.summary;
    if (s) {
        out.push(`Supports: ${s.supports ?? 0} Partial: ${s.partiallySupports ?? 0} Fails: ${s.doesNotSupport ?? 0} N/A: ${s.notApplicable ?? 0}`);
    }
    const failing = result?.failingCriteria;
    if (failing?.length) {
        out.push('Top regressions:');
        // Cap the listing at five criteria to keep CI logs readable.
        for (const c of failing.slice(0, 5)) {
            out.push(`  • ${c.id} ${c.name} — ${c.conformance}`);
        }
    }
    return out.join('\n');
}
|
package/dist/poll.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { ApiClient, AuditJobResponse } from './api-client';
/** Controls for the polling loop in `pollUntilComplete`. */
export interface PollOptions {
    /** Overall deadline for the whole poll, in seconds. */
    timeoutSeconds: number;
    /** First wait between requests, in ms (default 5000). */
    initialIntervalMs?: number;
    /** Backoff ceiling, in ms (default 30000). */
    maxIntervalMs?: number;
}
/**
 * Polls the API until the job reaches a terminal status ('complete' or
 * 'failed'), backing off 1.5x per attempt up to `maxIntervalMs`. Rejects
 * with an Error once `timeoutSeconds` elapses.
 */
export declare function pollUntilComplete(client: ApiClient, jobId: string, opts: PollOptions): Promise<AuditJobResponse>;
|
package/dist/poll.js
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * Polls the API until the job reaches a terminal status ('complete' or
 * 'failed'), backing off 1.5x per attempt up to `maxIntervalMs`. Throws
 * once `timeoutSeconds` elapses without a terminal status.
 */
export async function pollUntilComplete(client, jobId, opts) {
    const deadline = Date.now() + opts.timeoutSeconds * 1000;
    const maxWaitMs = opts.maxIntervalMs ?? 30_000;
    let waitMs = opts.initialIntervalMs ?? 5_000;
    while (Date.now() < deadline) {
        const job = await client.getJob(jobId);
        const terminal = job.status === 'complete' || job.status === 'failed';
        if (terminal) {
            return job;
        }
        await new Promise((wake) => setTimeout(wake, waitMs));
        // Exponential backoff with a ceiling, to go easy on the API.
        waitMs = Math.min(maxWaitMs, Math.floor(waitMs * 1.5));
    }
    throw new Error(`Polling timed out after ${opts.timeoutSeconds}s waiting for job ${jobId}`);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "theaccessible-audit-ci",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "TheAccessible CI/CD audit CLI — grade accessibility compliance on every PR and deploy.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"repository": {
|
|
7
|
+
"type": "git",
|
|
8
|
+
"url": "https://github.com/LarryAnglin/theaccessible-platform.git",
|
|
9
|
+
"directory": "packages/audit-cli"
|
|
10
|
+
},
|
|
11
|
+
"bin": {
|
|
12
|
+
"theaccessible": "./dist/cli.js"
|
|
13
|
+
},
|
|
14
|
+
"main": "./dist/index.js",
|
|
15
|
+
"types": "./dist/index.d.ts",
|
|
16
|
+
"files": [
|
|
17
|
+
"dist/**/*",
|
|
18
|
+
"README.md",
|
|
19
|
+
"LICENSE"
|
|
20
|
+
],
|
|
21
|
+
"scripts": {
|
|
22
|
+
"build": "tsc -p tsconfig.json",
|
|
23
|
+
"dev": "tsc -p tsconfig.json --watch",
|
|
24
|
+
"test": "vitest run",
|
|
25
|
+
"test:watch": "vitest",
|
|
26
|
+
"typecheck": "tsc --noEmit",
|
|
27
|
+
"lint": "eslint src --ext .ts"
|
|
28
|
+
},
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"commander": "^12.1.0",
|
|
31
|
+
"js-yaml": "^4.1.0",
|
|
32
|
+
"zod": "^3.23.8"
|
|
33
|
+
},
|
|
34
|
+
"devDependencies": {
|
|
35
|
+
"@types/js-yaml": "^4.0.9",
|
|
36
|
+
"@types/node": "^20.14.0",
|
|
37
|
+
"typescript": "^5.3.0",
|
|
38
|
+
"vitest": "^4.0.18"
|
|
39
|
+
},
|
|
40
|
+
"engines": {
|
|
41
|
+
"node": ">=20"
|
|
42
|
+
},
|
|
43
|
+
"publishConfig": {
|
|
44
|
+
"access": "public"
|
|
45
|
+
}
|
|
46
|
+
}
|