@projectwallace/css-code-coverage 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -46
- package/dist/css-code-coverage.js +149 -115
- package/dist/src/filter-entries.d.ts +1 -1
- package/dist/src/index.d.ts +5 -2
- package/dist/src/parse-coverage.d.ts +2 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -19,52 +19,33 @@ npm install @projectwallace/css-code-coverage

 ### Prerequisites

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-1. You provide a HTML parser that we use to 'scrape' the HTML in case the browser gives us not just plain CSS contents. Depending on where you run this analysis you can use:
-
-1. Browser:
-```ts
-function parse_html(html) {
-return new DOMParser().parseFromString(html, 'text/html')
-}
-```
-1. Node (using [linkedom](https://github.com/WebReflection/linkedom) in this example):
-
-```ts
-// $ npm install linkedom
-import { DOMParser } from 'linkedom'
-
-function parse_html(html: string) {
-return new DOMParser().parseFromString(html, 'text/html')
-}
-```
+You have collected browser coverage data of your CSS. There are several ways to do this:
+
+1. in the browser devtools in [Edge](https://learn.microsoft.com/en-us/microsoft-edge/devtools-guide-chromium/coverage/)/[Chrome](https://developer.chrome.com/docs/devtools/coverage/)/chromium
+1. Via the `coverage.startCSSCoverage()` API that headless browsers like [Playwright](https://playwright.dev/docs/api/class-coverage#coverage-start-css-coverage) or [Puppeteer](https://pptr.dev/api/puppeteer.coverage.startcsscoverage/) provide.
+
+Either way you end up with one or more JSON files that contain coverage data.
+
+```ts
+// Read a single JSON or a folder full of JSON files with coverage data
+// Coverage data looks like this:
+// {
+// url: 'https://www.projectwallace.com/style.css',
+// text: 'a { color: blue; text-decoration: underline; }', etc.
+// ranges: [
+// { start: 0, end: 46 }
+// ]
+// }
+import { parse_coverage } from '@projectwallace/css-code-coverage'
+
+let files = await fs.glob('./css-coverage/**/*.json')
+let coverage_data = []
+
+for (let file of files) {
+let json_content = await fs.readFile(file, 'utf-8')
+coverage_data.push(...parse_coverage(json_content))
+}
+```

 ### Bringing it together

@@ -73,3 +54,26 @@ import { calculate_coverage } from '@projectwallace/css-code-coverage'

 let report = calculate_coverage(coverage_data, parse_html)
 ```
+
+See [src/index.ts](https://github.com/projectwallace/css-code-coverage/blob/main/src/index.ts) for the data that's returned.
+
+### Optional: coverage from `<style>` blocks
+
+Coverage generators also create coverage ranges for `<style>` blocks in HTML. If this applies to your code you should provide an HTML parser that we use to 'scrape' the HTML in case the browser gives us not just plain CSS contents. Depending on where you run this analysis you can use:
+
+1. Browser:
+```ts
+function parse_html(html) {
+return new DOMParser().parseFromString(html, 'text/html')
+}
+```
+1. Node (using [linkedom](https://github.com/WebReflection/linkedom) in this example):
+
+```ts
+// $ npm install linkedom
+import { DOMParser } from 'linkedom'
+
+function parse_html(html: string) {
+return new DOMParser().parseFromString(html, 'text/html')
+}
+```
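The README excerpt above stops at the `calculate_coverage` call. As a rough sketch of what the returned report can be used for, the snippet below prints the overall ratios and a per-stylesheet summary. It only uses field names that appear in this release's `index.d.ts` and bundle (`byte_coverage_ratio`, `line_coverage_ratio`, `coverage_per_stylesheet`, ...); `coverage_data` and `parse_html` stand in for the values assembled in the README examples and are not defined here.

```ts
import { calculate_coverage, type Coverage, type Parser } from '@projectwallace/css-code-coverage'

// Stand-ins for the values built in the README examples above
declare const coverage_data: Coverage[]
declare const parse_html: Parser

let report = calculate_coverage(coverage_data, parse_html)

// Overall ratios for the whole run (0..1)
console.log(`Byte coverage: ${(report.byte_coverage_ratio * 100).toFixed(1)}%`)
console.log(`Line coverage: ${(report.line_coverage_ratio * 100).toFixed(1)}%`)

// Per-stylesheet breakdown
for (let sheet of report.coverage_per_stylesheet) {
  console.log(`${sheet.url}: ${sheet.used_bytes}/${sheet.total_bytes} bytes used, ${sheet.uncovered_lines} of ${sheet.total_lines} lines uncovered`)
}
```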
package/dist/css-code-coverage.js
CHANGED
@@ -1,169 +1,202 @@
+import * as v from "valibot";
 import { format as q } from "@projectwallace/format-css";
-import { tokenTypes as
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+import { tokenTypes as b, tokenize as N } from "css-tree/tokenizer";
+let z = v.array(
+v.object({
+text: v.optional(v.string()),
+url: v.string(),
+ranges: v.array(
+v.object({
+start: v.number(),
+end: v.number()
+})
+)
+})
+);
+function T(t) {
+return v.safeParse(z, t).success;
+}
+function Q(t) {
+try {
+let e = JSON.parse(t);
+return T(e) ? e : [];
+} catch {
+return [];
+}
+}
+function F(t) {
+return t.map(({ url: e, text: l, ranges: n }) => {
+if (!l)
+return { url: e, text: l, ranges: n };
+let d = q(l), u = /* @__PURE__ */ new Set([
+b.EOF,
+b.BadString,
+b.BadUrl,
+b.WhiteSpace,
+b.Semicolon,
+b.Comment,
+b.Colon
+]), o = n.map(({ start: i, end: s }) => ({ start: i, end: s, tokens: [] }));
+function p(i, s) {
+let r = 0;
+for (let _ of o) {
+if (_.start > s) return -1;
+if (_.start <= i && _.end >= s)
+return r;
+r++;
 }
 return -1;
 }
 let a = 0;
-
-if (u.has(
-a++,
-let _ =
-_ !== -1 &&
+N(l, (i, s, r) => {
+if (u.has(i)) return;
+a++, i === b.Url && (a += 2);
+let _ = p(s, r);
+_ !== -1 && o[_].tokens.push(a);
 });
 let c = /* @__PURE__ */ new Map();
-a = 0,
-u.has(
+a = 0, N(d, (i, s, r) => {
+u.has(i) || (a++, i === b.Url && (a += 2), c.set(a, { start: s, end: r }));
 });
 let g = [];
-for (let
-let
-
-start:
-end:
+for (let i of o) {
+let s = c.get(i.tokens.at(0)), r = c.get(i.tokens.at(-1));
+s !== void 0 && r !== void 0 && g.push({
+start: s.start,
+end: r.end
 });
 }
-return { url:
+return { url: e, text: d, ranges: g };
 });
 }
-function
-let
-for (let
-let
-if (
-let u =
-for (let
-let
+function J(t) {
+let e = /* @__PURE__ */ new Map();
+for (let l of t) {
+let n = l.text || "";
+if (e.has(n)) {
+let u = e.get(n).ranges;
+for (let o of l.ranges) {
+let p = !1;
 for (let a of u)
-if (a.start ===
-
+if (a.start === o.start && a.end === o.end) {
+p = !0;
 break;
 }
-
+p || u.push(o);
 }
 } else
-
-url:
-ranges:
+e.set(n, {
+url: l.url,
+ranges: l.ranges
 });
 }
-return
+return e;
 }
-function
+function P(t) {
 try {
-let
-return
+let e = new URL(t);
+return e.pathname.slice(e.pathname.lastIndexOf(".") + 1);
 } catch {
-let
-return
+let e = t.lastIndexOf(".");
+return t.slice(e, t.indexOf("/", e) + 1);
 }
 }
-function R(
-let
-for (let a of Array.from(
+function R(t, e, l) {
+let n = t(e), d = "", u = [], o = 0, p = n.querySelectorAll("style");
+for (let a of Array.from(p)) {
 let c = a.textContent;
 if (!c.trim()) continue;
 d += c;
-let g =
-for (let
-
-start:
-end:
+let g = e.indexOf(c), i = g + c.length;
+for (let s of l)
+s.start >= g && s.end <= i && u.push({
+start: o + (s.start - g),
+end: o + (s.end - g)
 });
-
+o += c.length;
 }
 return {
 css: d,
 ranges: u
 };
 }
-function
-return /<\/?(html|body|head|div|span|script|style)/i.test(
+function D(t) {
+return /<\/?(html|body|head|div|span|script|style)/i.test(t);
 }
-function
-let
-for (let
-if (!
-let d =
+function G(t, e) {
+let l = [];
+for (let n of t) {
+if (!n.text) continue;
+let d = P(n.url).toLowerCase();
 if (d !== "js") {
 if (d === "css") {
-
+l.push(n);
 continue;
 }
-if (
-
-
-
+if (D(n.text)) {
+if (!e)
+continue;
+let { css: u, ranges: o } = R(e, n.text, n.ranges);
+l.push({
+url: n.url,
 text: u,
-ranges:
+ranges: o
 });
 continue;
 }
-
-url:
-text:
-ranges:
+l.push({
+url: n.url,
+text: n.text,
+ranges: n.ranges
 });
 }
 }
-return
+return l;
+}
+function A(t, e) {
+return e === 0 ? 0 : t / e;
 }
-function
-let
+function V(t, e) {
+let l = t.length;
+if (!T(t))
+throw new TypeError("No valid coverage data found");
+let n = G(t, e), d = F(n), u = J(d), o = Array.from(u).map(([r, { url: _, ranges: E }]) => {
 function I(f, y) {
-let
-if (!U && !
-for (let
-if (!(
-if (
+let m = y + f.length, L = m + 1, U = /^\s*$/.test(f), x = f.endsWith("}");
+if (!U && !x) {
+for (let h of E)
+if (!(h.start > m || h.end < y)) {
+if (h.start <= y && h.end >= m)
 return !0;
-if (f.startsWith("@") &&
+if (f.startsWith("@") && h.start > y && h.start < L)
 return !0;
 }
 }
 return !1;
 }
-let
-`),
-for (let f = 0; f <
-let y =
-(
+let w = r.split(`
+`), S = w.length, W = new Uint8Array(S), O = 0, j = r.length, C = 0, B = 0;
+for (let f = 0; f < w.length; f++) {
+let y = w[f], m = B, U = B + y.length + 1, x = /^\s*$/.test(y), h = y.endsWith("}"), $ = I(y, m), k = !1, M = f > 0 && W[f - 1] === 1;
+($ && !h && !x || (x || h) && M || x && !M && I(w[f + 1], U)) && (k = !0), W[f] = k ? 1 : 0, k && (O++, C += y.length + 1), B = U;
 }
 return {
 url: _,
-text:
-ranges:
-unused_bytes:
-used_bytes:
-total_bytes:
-line_coverage_ratio: O
-byte_coverage_ratio:
-line_coverage:
-total_lines:
+text: r,
+ranges: E,
+unused_bytes: j - C,
+used_bytes: C,
+total_bytes: j,
+line_coverage_ratio: A(O, S),
+byte_coverage_ratio: A(C, j),
+line_coverage: W,
+total_lines: S,
 covered_lines: O,
-uncovered_lines:
+uncovered_lines: S - O
+// TODO: { is_covered: boolean, start_offset: number, start_line: number, end_offset: number, end_line: number }[]
 };
-}), { total_lines:
-(
+}), { total_lines: p, total_covered_lines: a, total_uncovered_lines: c, total_bytes: g, total_used_bytes: i, total_unused_bytes: s } = o.reduce(
+(r, _) => (r.total_lines += _.total_lines, r.total_covered_lines += _.covered_lines, r.total_uncovered_lines += _.uncovered_lines, r.total_bytes += _.total_bytes, r.total_used_bytes += _.used_bytes, r.total_unused_bytes += _.unused_bytes, r),
 {
 total_lines: 0,
 total_covered_lines: 0,
@@ -174,19 +207,20 @@ function J(s, n) {
 }
 );
 return {
-total_files_found:
+total_files_found: l,
 total_bytes: g,
-total_lines:
-used_bytes:
+total_lines: p,
+used_bytes: i,
 covered_lines: a,
-unused_bytes:
+unused_bytes: s,
 uncovered_lines: c,
-byte_coverage_ratio:
-line_coverage_ratio: a
-coverage_per_stylesheet:
-total_stylesheets:
+byte_coverage_ratio: A(i, g),
+line_coverage_ratio: A(a, p),
+coverage_per_stylesheet: o,
+total_stylesheets: o.length
 };
 }
 export {
-
+V as calculate_coverage,
+Q as parse_coverage
 };
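Two runtime behaviors worth noting in the new bundle: `parse_coverage` JSON-parses its input and returns an empty array when the data does not match the valibot schema added above (an array of `{ url, text?, ranges }` entries), while `calculate_coverage` throws `new TypeError("No valid coverage data found")` for invalid input. A minimal sketch of how that surfaces to callers; the sample entry is illustrative, not taken from a real coverage run:

```ts
import { parse_coverage, calculate_coverage, type Coverage } from '@projectwallace/css-code-coverage'

// Well-formed coverage entry: url + text + ranges, the shape the README documents
let ok = parse_coverage(JSON.stringify([
  {
    url: 'https://www.projectwallace.com/style.css',
    text: 'a { color: blue; text-decoration: underline; }',
    ranges: [{ start: 0, end: 46 }],
  },
]))
console.log(ok.length) // 1

// Malformed input is swallowed: parse_coverage returns []
console.log(parse_coverage('not json').length) // 0
console.log(parse_coverage('{"ranges":"nope"}').length) // 0

// calculate_coverage is stricter and throws on invalid data
try {
  calculate_coverage([{ url: 1 }] as unknown as Coverage[])
} catch (err) {
  console.log((err as Error).message) // "No valid coverage data found"
}
```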
package/dist/src/filter-entries.d.ts
CHANGED
@@ -1,3 +1,3 @@
 import { Coverage } from './parse-coverage.ts';
 import { Parser } from './types.ts';
-export declare function filter_coverage(coverage: Coverage[], parse_html: Parser): Coverage[];
+export declare function filter_coverage(coverage: Coverage[], parse_html?: Parser): Coverage[];
package/dist/src/index.d.ts
CHANGED
@@ -18,8 +18,8 @@ export type StylesheetCoverage = CoverageData & {
 };
 export type CoverageResult = CoverageData & {
 total_files_found: number;
-coverage_per_stylesheet: StylesheetCoverage[];
 total_stylesheets: number;
+coverage_per_stylesheet: StylesheetCoverage[];
 };
 /**
 * @description
@@ -32,4 +32,7 @@ export type CoverageResult = CoverageData & {
 * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
 * 5. Calculate line-coverage, byte-coverage per stylesheet
 */
-export declare function calculate_coverage(coverage: Coverage[], parse_html: Parser): CoverageResult;
+export declare function calculate_coverage(coverage: Coverage[], parse_html?: Parser): CoverageResult;
+export type { Coverage, Range } from './parse-coverage.ts';
+export { parse_coverage } from './parse-coverage.ts';
+export type { Parser } from './types.ts';
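The practical effect of this `index.d.ts` change: `parse_html` is now an optional second argument, and `parse_coverage` plus the `Coverage`, `Range`, and `Parser` types are re-exported from the package entry point. A sketch of the updated surface; the coverage file path is hypothetical, and the exact shape of `Parser` is not shown in this diff (usage in the bundle suggests a single-argument function that returns something with `querySelectorAll`):

```ts
import { readFile } from 'node:fs/promises'
import { DOMParser } from 'linkedom'
import {
  calculate_coverage,
  parse_coverage,
  type Coverage,
  type Parser,
} from '@projectwallace/css-code-coverage'

// Hypothetical path; any JSON produced by devtools/Playwright/Puppeteer CSS coverage works here
let json_content = await readFile('./css-coverage/home.json', 'utf-8')
let coverage: Coverage[] = parse_coverage(json_content)

// parse_html is optional as of this release; omit it when coverage only points at plain .css files
let report = calculate_coverage(coverage)

// ...or pass a parser so coverage collected from <style> blocks in HTML can be scraped out
let parse_html: Parser = (html) => new DOMParser().parseFromString(html, 'text/html')
let report_with_inline_styles = calculate_coverage(coverage, parse_html)

console.log(report.total_stylesheets, report_with_inline_styles.total_stylesheets)
```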
package/dist/src/parse-coverage.d.ts
CHANGED
@@ -7,4 +7,5 @@ export type Coverage = {
 text?: string;
 ranges: Range[];
 };
-export declare function
+export declare function is_valid_coverage(input: unknown): boolean;
+export declare function parse_coverage(input: string): Coverage[];
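For reference, the `Coverage`/`Range` shape declared here (and re-exported from the package root per the `index.d.ts` change above) matches the coverage JSON that browsers emit. A small hand-written value that satisfies the declared types; the values are illustrative only:

```ts
import type { Coverage, Range } from '@projectwallace/css-code-coverage'

// One covered range: bytes 0-46 of the stylesheet were used on the page
let used_range: Range = { start: 0, end: 46 }

// `text` is optional in the declaration; coverage tools may omit it for some entries
let entry: Coverage = {
  url: 'https://www.projectwallace.com/style.css',
  text: 'a { color: blue; text-decoration: underline; }',
  ranges: [used_range],
}
```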