coverage-check 0.2.1 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -5
- package/bin/coverage-check.mjs +4 -0
- package/dist/src/cli.d.mts +1 -0
- package/dist/src/cli.mjs +14 -0
- package/dist/src/commands/check-args.d.mts +20 -0
- package/dist/src/commands/check-args.mjs +89 -0
- package/dist/src/commands/check.d.mts +4 -0
- package/dist/src/commands/check.mjs +128 -0
- package/dist/src/commands/store-put.d.mts +11 -0
- package/dist/src/commands/store-put.mjs +104 -0
- package/{src/coverage-check.mts → dist/src/coverage-check.d.mts} +1 -9
- package/dist/src/coverage-check.mjs +4 -0
- package/dist/src/diff-parser.d.mts +17 -0
- package/dist/src/diff-parser.mjs +127 -0
- package/dist/src/github-comment.d.mts +9 -0
- package/dist/src/github-comment.mjs +66 -0
- package/dist/src/lcov-merge.d.mts +5 -0
- package/dist/src/lcov-merge.mjs +29 -0
- package/dist/src/lcov-parser.d.mts +8 -0
- package/dist/src/lcov-parser.mjs +44 -0
- package/dist/src/load-artifacts.d.mts +9 -0
- package/dist/src/load-artifacts.mjs +41 -0
- package/dist/src/patch-coverage.d.mts +5 -0
- package/dist/src/patch-coverage.mjs +65 -0
- package/dist/src/report.d.mts +4 -0
- package/dist/src/report.mjs +65 -0
- package/dist/src/rules.d.mts +4 -0
- package/dist/src/rules.mjs +30 -0
- package/dist/src/s3-suite-store.d.mts +28 -0
- package/dist/src/s3-suite-store.mjs +147 -0
- package/dist/src/s3-utils.d.mts +2 -0
- package/dist/src/s3-utils.mjs +14 -0
- package/dist/src/step-summary.d.mts +9 -0
- package/dist/src/step-summary.mjs +70 -0
- package/dist/src/store-factory.d.mts +11 -0
- package/dist/src/store-factory.mjs +23 -0
- package/dist/src/suite-store.d.mts +51 -0
- package/dist/src/suite-store.mjs +154 -0
- package/dist/src/types.d.mts +36 -0
- package/dist/src/types.mjs +1 -0
- package/package.json +19 -5
- package/bin/coverage-check.mts +0 -6
- package/src/cli.mts +0 -15
- package/src/cli.test.mts +0 -45
- package/src/commands/check-args.mts +0 -110
- package/src/commands/check.mts +0 -147
- package/src/commands/check.test.mts +0 -870
- package/src/commands/store-put.mts +0 -115
- package/src/commands/store-put.test.mts +0 -248
- package/src/diff-parser.mts +0 -127
- package/src/diff-parser.test.mts +0 -178
- package/src/github-comment.mts +0 -79
- package/src/github-comment.test.mts +0 -63
- package/src/lcov-merge.mts +0 -34
- package/src/lcov-merge.test.mts +0 -57
- package/src/lcov-parser.mts +0 -46
- package/src/lcov-parser.test.mts +0 -86
- package/src/load-artifacts.mts +0 -42
- package/src/load-artifacts.test.mts +0 -115
- package/src/patch-coverage.mts +0 -82
- package/src/patch-coverage.test.mts +0 -91
- package/src/report.mts +0 -78
- package/src/report.test.mts +0 -142
- package/src/rules.mts +0 -34
- package/src/rules.test.mts +0 -98
- package/src/s3-suite-store.mts +0 -138
- package/src/s3-suite-store.test.mts +0 -308
- package/src/step-summary.mts +0 -89
- package/src/step-summary.test.mts +0 -189
- package/src/store-factory.mts +0 -23
- package/src/store-factory.test.mts +0 -67
- package/src/suite-store.mts +0 -112
- package/src/suite-store.test.mts +0 -209
- package/src/types.mts +0 -43
package/README.md
CHANGED
|
@@ -52,9 +52,11 @@ The `--suite` flag on `check` tells the tool to use fresh `--artifacts` for the
|
|
|
52
52
|
|
|
53
53
|
```text
|
|
54
54
|
<prefix>/<suite>/sha/<sha>/lcov.info # payload
|
|
55
|
-
<prefix>/<suite>/branch/<branch>/latest.json # pointer: { "sha": "...", "timestamp": "..." }
|
|
55
|
+
<prefix>/<suite>/branch/<encoded-branch>/latest.json # pointer: { "sha": "...", "timestamp": "..." }
|
|
56
56
|
```
|
|
57
57
|
|
|
58
|
+
S3-backed stores need `s3:PutObject` for writes and `s3:GetObject` for reading branch pointers and baselines. The pointer reader also checks the previous unencoded pointer key (for example `branch/main/latest.json`) so stores written before branch-name encoding remain readable.
|
|
59
|
+
|
|
58
60
|
### Suite store with filesystem
|
|
59
61
|
|
|
60
62
|
For local development or simpler deployments:
|
|
@@ -122,7 +124,7 @@ Rules are matched in order; the first match wins. Files in the diff not matched
|
|
|
122
124
|
| `--store-fs` | — | Path to a filesystem suite store directory |
|
|
123
125
|
| `--store` | — | Alias for `--store-fs` |
|
|
124
126
|
| `--store-s3` | — | S3 suite store spec: `<bucket>[/<prefix>]` |
|
|
125
|
-
| `--branch` | `"main"` | Branch pointer to follow when reading from the store
|
|
127
|
+
| `--branch` | `"main"` | Branch pointer to follow when reading from the store |
|
|
126
128
|
| `--suite` | — | Name of the current suite (no `/` or `\\`); fresh artifacts override this suite in the store |
|
|
127
129
|
| `--strip-prefix` | — | Extra path prefix to strip from LCOV `SF:` lines (repeatable) |
|
|
128
130
|
| `--pr` | — | Pull request number for sticky comment |
|
|
@@ -137,13 +139,15 @@ Rules are matched in order; the first match wins. Files in the diff not matched
|
|
|
137
139
|
| `--store-fs` | required\* | Path to a filesystem suite store directory |
|
|
138
140
|
| `--store` | — | Alias for `--store-fs` |
|
|
139
141
|
| `--store-s3` | required\* | S3 suite store spec: `<bucket>[/<prefix>]` |
|
|
140
|
-
| `--sha` |
|
|
141
|
-
| `--branch` |
|
|
142
|
+
| `--sha` | — | Git SHA to associate with this coverage payload |
|
|
143
|
+
| `--branch` | — | Branch name for the pointer (e.g. `main` or `feature/foo`) |
|
|
142
144
|
| `--artifacts` | `./coverage-artifacts` | Directory to scan for `lcov.info` files |
|
|
143
145
|
| `--strip-prefix` | — | Extra path prefix to strip from LCOV `SF:` lines (repeatable) |
|
|
144
146
|
|
|
145
147
|
\* Exactly one of `--store-fs` or `--store-s3` is required.
|
|
146
148
|
|
|
149
|
+
When `--sha` and `--branch` are both provided, `store-put` writes a SHA-addressed payload and advances the branch pointer only if the incoming timestamp is not older than the current pointer. Omitting both flags preserves the legacy `<suite>/lcov.info` storage layout.
|
|
150
|
+
|
|
147
151
|
## Programmatic API
|
|
148
152
|
|
|
149
153
|
```ts
|
|
@@ -194,7 +198,7 @@ class MyCustomStore implements SuiteStore {
|
|
|
194
198
|
async put(
|
|
195
199
|
suite: string,
|
|
196
200
|
lcov: Buffer,
|
|
197
|
-
meta
|
|
201
|
+
meta?: { sha: string; branch: string; timestamp?: string },
|
|
198
202
|
): Promise<void> {
|
|
199
203
|
/* ... */
|
|
200
204
|
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** CLI entry point: dispatches argv to a subcommand (default `check`) and resolves to an exit code. */
export declare function main(argv: string[]): Promise<number>;
|
package/dist/src/cli.mjs
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { main as checkMain } from "./commands/check.mjs";
import { main as storePutMain } from "./commands/store-put.mjs";

// Write a single line to stderr.
const writeErr = (text) => process.stderr.write(`${text}\n`);

/**
 * CLI entry point: dispatches to a subcommand and resolves to an exit code.
 * An empty argv or a leading flag falls through to `check`, preserving the
 * flag-only invocation style.
 */
export async function main(argv) {
    const subcommand = argv[0];
    // Bare flags (or nothing at all) mean the legacy `check` invocation.
    if (subcommand === undefined || subcommand === "" || subcommand.startsWith("-")) {
        return checkMain(argv);
    }
    switch (subcommand) {
        case "check":
            return checkMain(argv.slice(1));
        case "store-put":
            return storePutMain(argv.slice(1));
        default:
            writeErr(`coverage-check: unknown subcommand: ${JSON.stringify(subcommand)}`);
            return 2;
    }
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { SuiteStore } from "../suite-store.mts";
import type { GhRunner } from "../github-comment.mts";
/** Parsed options for the `check` subcommand (produced by parseCheckArgs). */
export type CheckArgs = {
    /** Path to the coverage-rules YAML file. Default: ".coverage-rules.yml". */
    rules: string;
    /** Directory scanned for lcov.info artifacts. Default: "./coverage-artifacts". */
    artifacts: string;
    /** Base git ref for the diff. Default: "origin/main". */
    base: string;
    /** Head git ref for the diff. Default: "HEAD". */
    head: string;
    /** Pull request number for the sticky comment, or null to skip commenting. */
    pr: number | null;
    /** "owner/repo" slug; defaults to $GITHUB_REPOSITORY when not set via --repo. */
    repo: string;
    /** Path to write the JSON result to, or null to skip. */
    json: string | null;
    /** Extra path prefixes stripped from LCOV `SF:` lines. */
    stripPrefixes: string[];
    /** Suite store for baselines, or null when no store flag was given. */
    store: SuiteStore | null;
    /** Name of the current suite; fresh artifacts override this suite. */
    suite: string | null;
    /** Branch used to resolve baseline from the store. Default: "main". */
    branch?: string;
    /** Runner forwarded to upsertComment for posting the sticky PR comment. */
    gh?: GhRunner;
    /** Path to append the GitHub step summary. Default: $GITHUB_STEP_SUMMARY. */
    summaryFile?: string | null;
};
/**
 * Parses `check` CLI flags into a CheckArgs.
 * Throws on unknown flags, missing values, or invalid --pr/--suite/--branch.
 */
export declare function parseCheckArgs(argv: string[]): CheckArgs;
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import { makeStore } from "../store-factory.mjs";
import { assertSafePathComponent } from "../suite-store.mjs";
/**
 * Parses `check` CLI flags into the CheckArgs shape consumed by runCheck.
 *
 * Defaults: rules ".coverage-rules.yml", artifacts "./coverage-artifacts",
 * base "origin/main", head "HEAD", branch "main", repo $GITHUB_REPOSITORY,
 * summaryFile $GITHUB_STEP_SUMMARY.
 *
 * Throws on: an unknown flag, a flag missing its value, an unsafe --suite,
 * an empty --branch, a non-positive --pr, --store-fs combined with
 * --store-s3, or --pr without a resolvable repo.
 */
export function parseCheckArgs(argv) {
    let storeFs = null;
    let storeS3 = null;
    const args = {
        rules: ".coverage-rules.yml",
        artifacts: "./coverage-artifacts",
        base: "origin/main",
        head: "HEAD",
        pr: null,
        repo: process.env["GITHUB_REPOSITORY"] ?? "",
        json: null,
        stripPrefixes: [],
        store: null,
        suite: null,
        branch: "main",
        summaryFile: process.env["GITHUB_STEP_SUMMARY"] ?? null,
    };
    for (let i = 0; i < argv.length; i++) {
        const flag = argv[i];
        const next = argv[i + 1];
        // Consume the flag's value; a missing value or another flag is an error.
        const val = () => {
            if (next === undefined || next.startsWith("--")) {
                throw new Error(`${flag} requires a value`);
            }
            i++;
            return next;
        };
        switch (flag) {
            case "--rules":
                args.rules = val();
                break;
            case "--artifacts":
                args.artifacts = val();
                break;
            case "--base":
                args.base = val();
                break;
            case "--head":
                args.head = val();
                break;
            case "--repo":
                args.repo = val();
                break;
            case "--json":
                args.json = val();
                break;
            case "--suite": {
                const s = val();
                // Reject path-traversal characters before the name reaches a store.
                assertSafePathComponent(s, "suite");
                args.suite = s;
                break;
            }
            case "--strip-prefix":
                args.stripPrefixes.push(val());
                break;
            case "--branch": {
                const branch = val();
                if (branch.length === 0)
                    throw new Error(`invalid branch: ${JSON.stringify(branch)}`);
                args.branch = branch;
                break;
            }
            case "--store":
            case "--store-fs":
                storeFs = val();
                break;
            case "--store-s3":
                storeS3 = val();
                break;
            case "--pr": {
                const raw = val();
                const pr = parseInt(raw, 10);
                // Check the parsed integer so zero spelled with leading zeros
                // ("0", "00", …) is rejected too; the previous `raw === "0"`
                // comparison let "00" through as pr 0.
                if (!/^\d+$/.test(raw) || pr === 0)
                    throw new Error(`--pr must be a positive integer, got: ${JSON.stringify(raw)}`);
                args.pr = pr;
                break;
            }
            default:
                throw new Error(`unknown flag: ${flag}`);
        }
    }
    if (storeFs && storeS3)
        throw new Error("--store-fs and --store-s3 are mutually exclusive");
    if (args.pr !== null && args.repo.trim() === "")
        throw new Error("--repo is required when --pr is set (or define GITHUB_REPOSITORY)");
    args.store = makeStore({ fs: storeFs, s3: storeS3 });
    return args;
}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync } from "node:fs";
import { parseLcov } from "../lcov-parser.mjs";
import { mergeLcov } from "../lcov-merge.mjs";
import { getChangedLines } from "../diff-parser.mjs";
import { loadRules } from "../rules.mjs";
import { computePatchCoverage } from "../patch-coverage.mjs";
import { collapseRanges, renderFailureComment } from "../report.mjs";
import { upsertComment } from "../github-comment.mjs";
import { collectLcovFiles, buildStripPrefixes } from "../load-artifacts.mjs";
import { writeSummary } from "../step-summary.mjs";
import { parseCheckArgs } from "./check-args.mjs";
// Line-oriented writers for the standard streams.
const stdout = (msg) => process.stdout.write(`${msg}\n`);
const stderr = (msg) => process.stderr.write(`${msg}\n`);
/**
 * CLI entry for `check`: parses flags and delegates to runCheck.
 * Returns 2 on a flag-parsing error, otherwise runCheck's exit code.
 */
export async function main(argv) {
    let args;
    try {
        args = parseCheckArgs(argv);
    }
    catch (err) {
        stderr(`coverage-check: ${String(err)}`);
        return 2;
    }
    return runCheck(args);
}
/**
 * Runs the patch-coverage check.
 *
 * Pipeline: load rules → gather LCOV (store baselines + fresh artifacts) →
 * merge → git diff base..head → bucket changed lines per rule → report to
 * stdout, optional JSON file, step summary, and sticky PR comment.
 *
 * Exit codes: 0 pass (or no coverage data at all), 1 threshold failure,
 * 2 configuration/IO error.
 */
export async function runCheck(args) {
    let rules;
    try {
        rules = loadRules(args.rules);
    }
    catch (err) {
        stderr(`coverage-check: failed to load rules: ${err}`);
        return 2;
    }
    const branch = args.branch ?? "main";
    const stripPrefixes = buildStripPrefixes(args.stripPrefixes);
    const reports = [];
    const suiteSources = [];
    // Baselines: pull every stored suite except the one being refreshed.
    if (args.store !== null) {
        const suites = await args.store.list();
        for (const suite of suites) {
            if (suite === args.suite)
                continue;
            const buf = await args.store.get(suite, { branch });
            if (buf !== null) {
                const lcov = parseLcov(buf.toString("utf8"), stripPrefixes);
                reports.push(lcov);
                suiteSources.push({ suite, source: "store", lcov });
            }
        }
    }
    // Fresh artifacts stand in for the current suite's stored baseline.
    const lcovFiles = collectLcovFiles(args.artifacts);
    const freshLcovs = [];
    for (const f of lcovFiles) {
        const lcov = parseLcov(readFileSync(f, "utf8"), stripPrefixes);
        reports.push(lcov);
        freshLcovs.push(lcov);
    }
    if (freshLcovs.length > 0) {
        suiteSources.push({
            suite: args.suite ?? "(current)",
            source: "fresh",
            lcov: mergeLcov(freshLcovs),
        });
    }
    // No coverage data anywhere is treated as a skip (pass), not an error.
    if (reports.length === 0) {
        stderr(`coverage-check: no coverage data found — skipping`);
        return 0;
    }
    const lcov = mergeLcov(reports);
    let diff;
    try {
        diff = await getChangedLines(args.base, args.head);
    }
    catch (err) {
        stderr(`coverage-check: git diff failed: ${err}`);
        return 2;
    }
    const { buckets, informational } = computePatchCoverage(diff, lcov, rules);
    const passed = buckets.every((b) => b.passed);
    const result = { buckets, informational, passed };
    if (args.json) {
        writeFileSync(args.json, JSON.stringify(result, null, 2));
    }
    // Link back to this workflow run when running inside GitHub Actions.
    const runUrl = process.env["GITHUB_SERVER_URL"] && process.env["GITHUB_RUN_ID"]
        ? `${process.env["GITHUB_SERVER_URL"]}/${args.repo}/actions/runs/${process.env["GITHUB_RUN_ID"]}`
        : "N/A";
    if (!passed) {
        stdout("\ncoverage-check: FAILED\n");
        for (const bucket of buckets.filter((b) => !b.passed)) {
            /* c8 ignore next -- bucket.coverable is always > 0 by patch-coverage.mts L36 guard */
            const pct = bucket.coverable > 0 ? `${((bucket.hit / bucket.coverable) * 100).toFixed(1)}%` : "—";
            stdout(` ${bucket.rule}: ${pct} (${bucket.hit}/${bucket.coverable}) — threshold ${bucket.threshold}%`);
            for (const file of bucket.files.filter((f) => f.uncoveredLines.length > 0)) {
                stdout(` ${file.file}: ${collapseRanges(file.uncoveredLines)}`);
            }
        }
    }
    else {
        stdout("\ncoverage-check: PASSED\n");
        for (const bucket of buckets) {
            /* c8 ignore next -- bucket.coverable is always > 0 by patch-coverage.mts L36 guard */
            const pct = bucket.coverable > 0 ? `${((bucket.hit / bucket.coverable) * 100).toFixed(1)}%` : "—";
            stdout(` ${bucket.rule}: ${pct} ✓`);
        }
    }
    // An explicit summaryFile (possibly null) wins over the environment default.
    const summaryFile = args.summaryFile !== undefined
        ? args.summaryFile
        : (process.env["GITHUB_STEP_SUMMARY"] ?? null);
    if (summaryFile) {
        try {
            writeSummary(summaryFile, suiteSources, result, runUrl, branch);
        }
        catch (err) {
            stderr(`coverage-check: failed to write step summary: ${err}`);
            return 2;
        }
    }
    // Sticky PR comment: body is empty on pass; upsertComment deletes a stale
    // failure comment in that case.
    if (args.pr !== null && args.repo) {
        const body = passed ? "" : renderFailureComment(result, runUrl);
        try {
            await upsertComment(body, args.repo, args.pr, passed, args.gh);
        }
        catch (err) {
            stderr(`coverage-check: failed to post PR comment: ${err}`);
        }
    }
    return passed ? 0 : 1;
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import type { SuiteStore } from "../suite-store.mts";
/** Parsed options for the `store-put` subcommand. */
export type StorePutArgs = {
    /** Suite the merged coverage is stored under (validated as a safe path component). */
    suite: string;
    /** Destination suite store (filesystem- or S3-backed). */
    store: SuiteStore;
    /** Directory scanned for lcov.info files. Default: "./coverage-artifacts". */
    artifacts: string;
    /** Extra path prefixes stripped from LCOV `SF:` lines. */
    stripPrefixes: string[];
    /** Git SHA for the payload; must be given together with `branch`. */
    sha?: string;
    /** Branch pointer to advance; must be given together with `sha`. */
    branch?: string;
};
/** CLI entry: parses argv, then runs store-put. Resolves to a process exit code. */
export declare function main(argv: string[]): Promise<number>;
/** Merges artifact LCOVs and writes them into the store. Resolves to a process exit code. */
export declare function runStorePut(args: StorePutArgs): Promise<number>;
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { readFileSync } from "node:fs";
import { parseLcov } from "../lcov-parser.mjs";
import { mergeLcov, toLcov } from "../lcov-merge.mjs";
import { collectLcovFiles, buildStripPrefixes } from "../load-artifacts.mjs";
import { makeStore } from "../store-factory.mjs";
import { assertSafePathComponent } from "../suite-store.mjs";

// Line-oriented writers for the standard streams.
const stdout = (msg) => process.stdout.write(`${msg}\n`);
const stderr = (msg) => process.stderr.write(`${msg}\n`);

/**
 * Parses store-put CLI flags, validates them, and constructs the suite store.
 * Throws a descriptive Error on any invalid flag or combination.
 */
function parseArgs(argv) {
    let fsSpec = null;
    let s3Spec = null;
    const opts = {
        suite: "",
        artifacts: "./coverage-artifacts",
        stripPrefixes: [],
        sha: undefined,
        branch: undefined,
    };
    for (let i = 0; i < argv.length; i++) {
        const token = argv[i];
        const lookahead = argv[i + 1];
        // Consume the value following `token`; flags never double as values.
        const takeValue = () => {
            if (lookahead === undefined || lookahead.startsWith("--")) {
                throw new Error(`${token} requires a value`);
            }
            i++;
            return lookahead;
        };
        if (token === "--suite") {
            opts.suite = takeValue();
        }
        else if (token === "--store" || token === "--store-fs") {
            fsSpec = takeValue();
        }
        else if (token === "--store-s3") {
            s3Spec = takeValue();
        }
        else if (token === "--artifacts") {
            opts.artifacts = takeValue();
        }
        else if (token === "--strip-prefix") {
            opts.stripPrefixes.push(takeValue());
        }
        else if (token === "--sha") {
            opts.sha = takeValue();
        }
        else if (token === "--branch") {
            opts.branch = takeValue();
        }
        else {
            throw new Error(`unknown flag: ${token}`);
        }
    }
    if (!opts.suite)
        throw new Error("--suite is required");
    if (fsSpec && s3Spec)
        throw new Error("--store-fs and --store-s3 are mutually exclusive");
    if (!fsSpec && !s3Spec)
        throw new Error("--store-fs/--store or --store-s3 is required");
    const hasSha = opts.sha !== undefined;
    const hasBranch = opts.branch !== undefined;
    if (hasSha !== hasBranch) {
        throw new Error("--sha and --branch must be provided together");
    }
    assertSafePathComponent(opts.suite, "suite");
    if (opts.sha !== undefined)
        assertSafePathComponent(opts.sha, "sha");
    if (opts.branch !== undefined && opts.branch.length === 0) {
        throw new Error(`invalid branch: ${JSON.stringify(opts.branch)}`);
    }
    const store = makeStore({ fs: fsSpec, s3: s3Spec });
    return { ...opts, store };
}

/** CLI wrapper: parse flags, report parse failures to stderr, run the command. */
export async function main(argv) {
    let parsed;
    try {
        parsed = parseArgs(argv);
    }
    catch (err) {
        stderr(`coverage-check store-put: ${String(err)}`);
        return 2;
    }
    return runStorePut(parsed);
}

/**
 * Merges every lcov.info found under the artifacts directory and writes the
 * result into the suite store, optionally tagged with sha/branch metadata.
 * Returns a process exit code (0 on success, 2 when no artifacts exist).
 */
export async function runStorePut(args) {
    const files = collectLcovFiles(args.artifacts);
    if (files.length === 0) {
        stderr(`coverage-check store-put: no lcov.info files found under ${args.artifacts}`);
        return 2;
    }
    const prefixes = buildStripPrefixes(args.stripPrefixes);
    const parsedReports = files.map((file) => parseLcov(readFileSync(file, "utf8"), prefixes));
    const payload = toLcov(mergeLcov(parsedReports));
    const meta = args.sha !== undefined && args.branch !== undefined
        ? { sha: args.sha, branch: args.branch }
        : undefined;
    await args.store.put(args.suite, Buffer.from(payload, "utf8"), meta);
    const metaLabel = args.sha !== undefined ? ` sha=${args.sha} branch=${args.branch}` : "";
    stdout(`coverage-check store-put: stored suite "${args.suite}" (${files.length} file(s))${metaLabel}`);
    return 0;
}
|
|
@@ -2,16 +2,8 @@ export { runCheck } from "./commands/check.mts";
|
|
|
2
2
|
export { runStorePut } from "./commands/store-put.mts";
|
|
3
3
|
export { FileSystemSuiteStore } from "./suite-store.mts";
|
|
4
4
|
export { S3SuiteStore } from "./s3-suite-store.mts";
|
|
5
|
-
|
|
6
5
|
export type { CheckArgs } from "./commands/check.mts";
|
|
7
6
|
export type { StorePutArgs } from "./commands/store-put.mts";
|
|
8
7
|
export type { SuiteStore, SuiteMeta } from "./suite-store.mts";
|
|
9
8
|
export type { S3SuiteStoreOptions } from "./s3-suite-store.mts";
|
|
10
|
-
export type {
|
|
11
|
-
CoverageCheckResult,
|
|
12
|
-
BucketResult,
|
|
13
|
-
FileCoverageResult,
|
|
14
|
-
LcovData,
|
|
15
|
-
DiffLines,
|
|
16
|
-
CoverageRule,
|
|
17
|
-
} from "./types.mts";
|
|
9
|
+
export type { CoverageCheckResult, BucketResult, FileCoverageResult, LcovData, DiffLines, CoverageRule, } from "./types.mts";
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { DiffLines } from "./types.mts";
/**
 * Decodes a git C-string (inner content between surrounding double-quotes).
 * Git quotes unusual paths (non-ASCII, spaces, etc.) with core.quotePath=true.
 * Handles octal byte escapes (\nnn), \\, \", \n, \t.
 *
 * @param s - Inner content of the quoted path, without the surrounding quotes.
 * @returns The decoded path as a UTF-8 string.
 */
export declare function decodeGitCString(s: string): string;
/**
 * Parses the output of `git diff --unified=0` into a map of
 * repo-root-relative file path → set of added/modified line numbers.
 *
 * Only added lines (lines in the new version) are tracked. Deleted-only
 * hunks (where the `+` count is 0) are skipped.
 *
 * @param text - Raw `git diff` output.
 * @returns Map of file path → set of changed line numbers in the new version.
 */
export declare function parseDiff(text: string): DiffLines;
/**
 * Runs git diff and returns the parsed result.
 *
 * @param baseRef - Base ref; the diff is taken from the merge-base of baseRef and headRef.
 * @param headRef - Head ref to diff against.
 */
export declare function getChangedLines(baseRef: string, headRef: string): Promise<DiffLines>;
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
/**
 * Decodes a git C-string (inner content between surrounding double-quotes).
 * Git quotes unusual paths (non-ASCII, spaces, etc.) with core.quotePath=true.
 * Handles octal byte escapes (\nnn), \\, \", \n, \t.
 */
export function decodeGitCString(s) {
    // Byte values for the one-character escapes handled here.
    const simpleEscapes = new Map([
        ["\\", 92],
        ['"', 34],
        ["n", 10],
        ["t", 9],
    ]);
    const rawBytes = [];
    let i = 0;
    while (i < s.length) {
        const follower = i + 1 < s.length ? s[i + 1] : undefined;
        if (s[i] === "\\" && follower !== undefined) {
            const simple = simpleEscapes.get(follower);
            if (follower >= "0" && follower <= "7") {
                // \nnn — three octal digits encoding one raw byte.
                rawBytes.push(parseInt(s.slice(i + 1, i + 4), 8));
                i += 4;
            }
            else if (simple !== undefined) {
                rawBytes.push(simple);
                i += 2;
            }
            else {
                // Unrecognized escape: keep the backslash itself, literally.
                rawBytes.push(s.charCodeAt(i));
                i += 1;
            }
        }
        else {
            rawBytes.push(s.charCodeAt(i));
            i += 1;
        }
    }
    // Escaped bytes form a UTF-8 sequence; reassemble it into a string.
    return Buffer.from(new Uint8Array(rawBytes)).toString("utf8");
}
|
|
43
|
+
/**
 * Parses the output of `git diff --unified=0` into a map of
 * repo-root-relative file path → set of added/modified line numbers.
 *
 * Only added lines (lines in the new version) are tracked. Deleted-only
 * hunks (where the `+` count is 0) are skipped.
 */
export function parseDiff(text) {
    // @@ -old_start[,old_count] +new_start[,new_count] @@
    const hunkHeader = /@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/;
    const changed = new Map();
    let activeLines = null;
    let awaitingFileHeader = false;
    for (const rawLine of text.split("\n")) {
        const line = rawLine.trimEnd();
        if (line.startsWith("diff --git ")) {
            // New file section: forget state until its `+++` header arrives.
            activeLines = null;
            awaitingFileHeader = true;
            continue;
        }
        // Only parse +++ as a file header when we are in the diff header block
        // (after `diff --git` / `---`). Without this guard a source line beginning
        // with `++ b/` would appear as `+++ b/…` in the diff and be misclassified.
        if (awaitingFileHeader) {
            let headerPath = null;
            if (line.startsWith("+++ b/")) {
                headerPath = line.slice(6);
            }
            else if (line.startsWith('+++ "b/') && line.endsWith('"')) {
                headerPath = decodeGitCString(line.slice(5, -1)).slice(2);
            }
            if (headerPath !== null) {
                awaitingFileHeader = false;
                if (headerPath === "dev/null") {
                    activeLines = null;
                }
                else {
                    activeLines = changed.get(headerPath) ?? new Set();
                    changed.set(headerPath, activeLines);
                }
                continue;
            }
        }
        if (line.startsWith("@@ ") && activeLines !== null) {
            const m = line.match(hunkHeader);
            if (!m)
                continue;
            const start = parseInt(m[1], 10);
            // A missing count means a single line; zero means deletion-only.
            const count = m[2] !== undefined ? parseInt(m[2], 10) : 1;
            for (let offset = 0; offset < count; offset++) {
                activeLines.add(start + offset);
            }
        }
    }
    return changed;
}
|
|
101
|
+
/** Runs git diff and returns the parsed result. */
export async function getChangedLines(baseRef, headRef) {
    const { spawn } = await import("node:child_process");
    // Run a command, capture stdout, reject on spawn failure or non-zero exit.
    const runCapture = (command, commandArgs) => new Promise((resolve, reject) => {
        const collected = [];
        const child = spawn(command, commandArgs, { stdio: ["ignore", "pipe", "inherit"] });
        child.stdout.on("data", (piece) => collected.push(piece));
        child.on("error", reject);
        child.on("close", (exitCode) => {
            if (exitCode === 0) {
                resolve(Buffer.concat(collected).toString("utf8"));
            }
            else {
                reject(new Error(`${command} exited with code ${exitCode}`));
            }
        });
    });
    // Diff from the merge-base so only the branch's own changes are counted.
    const mergeBaseOut = await runCapture("git", ["merge-base", baseRef, headRef]);
    const mergeBase = mergeBaseOut.trim();
    // --src-prefix/--dst-prefix override diff.noprefix and diff.mnemonicPrefix git config
    const diffText = await runCapture("git", [
        "diff",
        "--unified=0",
        "--inter-hunk-context=0",
        "--no-color",
        "--src-prefix=a/",
        "--dst-prefix=b/",
        mergeBase,
        headRef,
    ]);
    return parseDiff(diffText);
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Command runner: takes an argv array and resolves with captured output.
 * NOTE(review): presumably invokes the GitHub `gh` CLI — confirm against
 * github-comment.mjs (implementation not visible here).
 */
export type GhRunner = (args: string[]) => Promise<string>;
/**
 * Posts or updates the sticky coverage-check comment on a pull request.
 *
 * - On failure: upserts the failure comment body (POST if absent, PATCH if exists).
 * - On pass with prior comment: deletes the prior comment.
 * - On pass with no prior comment: stays silent.
 *
 * @param body - Comment body (empty string on pass).
 * @param repo - "owner/repo" slug.
 * @param pr - Pull request number.
 * @param passed - Whether the check passed; controls upsert vs delete behavior.
 * @param gh - Optional custom runner for GitHub CLI calls.
 */
export declare function upsertComment(body: string, repo: string, pr: number, passed: boolean, gh?: GhRunner): Promise<void>;
|