@projectwallace/css-code-coverage 0.2.3 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/css-code-coverage.js +172 -195
- package/dist/src/chunkify.d.ts +9 -0
- package/dist/src/chunkify.test.d.ts +1 -0
- package/dist/src/decuplicate.d.ts +1 -1
- package/dist/src/extend-ranges.d.ts +5 -0
- package/dist/src/extend-ranges.test.d.ts +1 -0
- package/dist/src/index.d.ts +5 -11
- package/dist/src/parse-coverage.d.ts +1 -1
- package/dist/src/prettify.d.ts +12 -2
- package/package.json +1 -2
package/dist/css-code-coverage.js CHANGED
@@ -1,247 +1,224 @@
-import * as
-import { format as
-
-
-
-
-
-
-
-
-end: h.number()
+import * as g from "valibot";
+import { format as x } from "@projectwallace/format-css";
+let m = g.array(
+g.object({
+text: g.string(),
+url: g.string(),
+ranges: g.array(
+g.object({
+start: g.number(),
+end: g.number()
 })
 )
 })
 );
-function
-return
+function p(e) {
+return g.safeParse(m, e).success;
 }
-function
+function M(e) {
 try {
-let
-return
+let t = JSON.parse(e);
+return p(t) ? t : [];
 } catch {
 return [];
 }
 }
-function
-
-
-
-
-
-
-
-
-
-
-x.Colon
-]), o = n.map(({ start: a, end: i }) => ({ start: a, end: i, tokens: [] }));
-function v(a, i) {
-let r = 0;
-for (let u of o) {
-if (u.start > i) return -1;
-if (u.start <= a && u.end >= i)
-return r;
-r++;
-}
-return -1;
-}
-let _ = 0;
-$(s, (a, i, r) => {
-if (f.has(a)) return;
-_++, a === x.Url && (_ += 2);
-let u = v(i, r);
-u !== -1 && o[u].tokens.push(_);
-});
-let g = /* @__PURE__ */ new Map();
-_ = 0, $(c, (a, i, r) => {
-f.has(a) || (_++, a === x.Url && (_ += 2), g.set(_, { start: i, end: r }));
-});
-let y = [];
-for (let a of o) {
-let i = g.get(a.tokens.at(0)), r = g.get(a.tokens.at(-1));
-i !== void 0 && r !== void 0 && y.push({
-start: i.start,
-end: r.end
-});
-}
-return { url: e, text: c, ranges: y };
-});
-}
-function R(t) {
-let e = /* @__PURE__ */ new Map();
-for (let s of t) {
-let n = s.text || "";
-if (e.has(n)) {
-let f = e.get(n).ranges;
-for (let o of s.ranges) {
-let v = !1;
-for (let _ of f)
-if (_.start === o.start && _.end === o.end) {
-v = !0;
+function k(e) {
+let t = /* @__PURE__ */ new Map();
+for (let n of e) {
+let r = n.text;
+if (t.has(r)) {
+let l = t.get(r).ranges;
+for (let a of n.ranges) {
+let s = !1;
+for (let f of l)
+if (f.start === a.start && f.end === a.end) {
+s = !0;
 break;
 }
-
+s || l.push(a);
 }
 } else
-
-url:
-ranges:
+t.set(r, {
+url: n.url,
+ranges: n.ranges
 });
 }
-return
+return Array.from(t, ([n, { url: r, ranges: o }]) => ({ text: n, url: r, ranges: o }));
 }
-function
+function A(e) {
 try {
-let
-return
+let t = new URL(e);
+return t.pathname.slice(t.pathname.lastIndexOf(".") + 1);
 } catch {
-let
-return
+let t = e.lastIndexOf(".");
+return e.slice(t, e.indexOf("/", t) + 1);
 }
 }
-function
-let
-for (let
-let
-if (!
-
-let
-for (let
-
-start:
-end:
+function w(e, t, n) {
+let r = e(t), o = "", l = [], a = 0, s = r.querySelectorAll("style");
+for (let f of Array.from(s)) {
+let c = f.textContent;
+if (!c.trim()) continue;
+o += c;
+let i = t.indexOf(c), d = i + c.length;
+for (let _ of n)
+_.start >= i && _.end <= d && l.push({
+start: a + (_.start - i),
+end: a + (_.end - i)
 });
-
+a += c.length;
 }
 return {
-css:
-ranges:
+css: o,
+ranges: l
 };
 }
-function
-return /<\/?(html|body|head|div|span|script|style)/i.test(
+function E(e) {
+return /<\/?(html|body|head|div|span|script|style)/i.test(e);
 }
-function
-let
-for (let
-
-
-
-
-s.push(n);
+function O(e, t) {
+let n = [];
+for (let r of e) {
+let o = A(r.url).toLowerCase();
+if (o !== "js") {
+if (o === "css") {
+n.push(r);
 continue;
 }
-if (
-if (!
+if (E(r.text)) {
+if (!t)
 continue;
-let { css:
-
-url:
-text:
-ranges:
+let { css: l, ranges: a } = w(t, r.text, r.ranges);
+n.push({
+url: r.url,
+text: l,
+ranges: a
 });
 continue;
 }
-
-url:
-text:
-ranges:
+n.push({
+url: r.url,
+text: r.text,
+ranges: r.ranges
 });
 }
 }
-return
+return n;
 }
-function
-
+function L(e) {
+let t = [], n = 0;
+for (let r of e.ranges)
+n !== r.start && (t.push({
+start_offset: n,
+end_offset: r.start,
+is_covered: !1
+}), n = r.start), t.push({
+start_offset: r.start,
+end_offset: r.end,
+is_covered: !0
+}), n = r.end;
+return n !== e.text.length && t.push({
+start_offset: n,
+end_offset: e.text.length,
+is_covered: !1
+}), {
+...e,
+chunks: t
+};
 }
-function
-let
-
-
-
-
-
-
-
-if (!(m.start > b || m.end < d)) {
-if (m.start <= d && m.end >= b)
-return !0;
-if (l.startsWith("@") && m.start > d && m.start < N)
-return !0;
-}
-}
-return !1;
-}
-let S = r.split(`
-`), O = S.length, p = new Uint8Array(O), C = 0, B = r.length, U = 0, E = 0;
-for (let l = 0; l < S.length; l++) {
-let d = S[l], b = E, W = E + d.length + 1, k = /^\s*$/.test(d), m = d.endsWith("}"), z = M(d, b), w = !1, T = l > 0 && p[l - 1] === 1;
-(z && !m && !k || (k || m) && T || k && !T && M(S[l + 1], W)) && (w = !0), p[l] = w ? 1 : 0, w && (C++, U += d.length + 1), E = W;
-}
-let A = [
-{
-start_line: 1,
-is_covered: p[0] === 1,
-end_line: 1,
-total_lines: 1
-}
-];
-for (let l = 1; l < p.length; l++) {
-let d = p[l];
-if (d !== p[l - 1]) {
-let b = A.at(-1);
-b.end_line = l, b.total_lines = l - b.start_line + 1, A.push({
-start_line: l + 1,
-is_covered: d === 1,
-end_line: l,
-total_lines: 0
-});
+function N(e) {
+for (let n of e)
+for (let r of n.ranges)
+for (let o = 1; o >= -28; o--) {
+let l = r.start + o;
+if (n.text.charAt(l) === "@") {
+r.start = l;
+break;
+}
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+return e;
+}
+function S(e) {
+let t = 1, n = 0, r = e.chunks.map((l, a) => {
+let s = x(e.text.slice(l.start_offset, l.end_offset));
+l.is_covered && (a === 0 ? s = s + `
+` : a === e.chunks.length - 1 ? s = `
+` + s : s = `
+` + s + `
+`);
+let f = s.split(`
+`).length, c = n, i = Math.max(n + s.length - 1, 0), d = t, _ = t + f;
+return t = _, n = i, {
+...l,
+start_offset: c,
+start_line: d,
+end_line: _ - 1,
+end_offset: i,
+css: s,
+total_lines: _ - d
 };
-})
-
+});
+return {
+...e,
+// TODO: update ranges as well?? Or remove them because we have chunks now
+chunks: r,
+text: r.map(({ css: l }) => l).join("")
+};
+}
+function y(e, t) {
+return t === 0 ? 0 : e / t;
+}
+function T(e) {
+let { text: t, url: n, chunks: r } = e, o = 0, l = 0, a = 0, s = 0, f = 0, c = 0;
+for (let i of r) {
+let d = i.total_lines, _ = i.end_offset - i.start_offset;
+s += d, a += _, i.is_covered ? (f += d, l += _) : (c += d, o += _);
+}
+return {
+url: n,
+text: t,
+uncovered_bytes: o,
+covered_bytes: l,
+total_bytes: a,
+line_coverage_ratio: y(f, s),
+byte_coverage_ratio: y(l, a),
+total_lines: s,
+covered_lines: f,
+uncovered_lines: c,
+chunks: r
+};
+}
+function C(e, t) {
+let n = e.length;
+if (!p(e))
+throw new TypeError("No valid coverage data found");
+let r = O(e, t), o = k(r), f = N(o).map((u) => L(u)).map((u) => S(u)).map((u) => T(u)), { total_lines: c, total_covered_lines: i, total_uncovered_lines: d, total_bytes: _, total_covered_bytes: h, total_uncovered_bytes: b } = f.reduce(
+(u, v) => (u.total_lines += v.total_lines, u.total_covered_lines += v.covered_lines, u.total_uncovered_lines += v.uncovered_lines, u.total_bytes += v.total_bytes, u.total_covered_bytes += v.covered_bytes, u.total_uncovered_bytes += v.uncovered_bytes, u),
 {
 total_lines: 0,
 total_covered_lines: 0,
 total_uncovered_lines: 0,
 total_bytes: 0,
-
-
+total_covered_bytes: 0,
+total_uncovered_bytes: 0
 }
 );
 return {
-total_files_found:
-total_bytes:
-total_lines:
-
-covered_lines:
-
-uncovered_lines:
-byte_coverage_ratio:
-line_coverage_ratio:
-coverage_per_stylesheet:
-total_stylesheets:
+total_files_found: n,
+total_bytes: _,
+total_lines: c,
+covered_bytes: h,
+covered_lines: i,
+uncovered_bytes: b,
+uncovered_lines: d,
+byte_coverage_ratio: y(h, _),
+line_coverage_ratio: y(i, c),
+coverage_per_stylesheet: f,
+total_stylesheets: f.length
 };
 }
 export {
-
-
+C as calculate_coverage,
+M as parse_coverage
 };
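The rewritten bundle keeps the same two public exports, parse_coverage and calculate_coverage, but replaces the old token-walking range extraction with the chunk pipeline above (css-tree is dropped from the dependencies, see package.json below). A minimal usage sketch; the coverage.json file name and the Node fs call are placeholders, and the input is assumed to be a DevTools-style CSS coverage export, i.e. the array of { url, text, ranges } objects that the valibot schema above validates:

import { readFileSync } from 'node:fs'
import { parse_coverage, calculate_coverage } from '@projectwallace/css-code-coverage'

// parse_coverage returns the parsed entries, or [] when the JSON does not match the schema
const entries = parse_coverage(readFileSync('coverage.json', 'utf8'))

// The optional second argument is a DOM parser used to pull <style> contents out of
// entries whose text looks like HTML; without it those entries are skipped
const result = calculate_coverage(entries)

console.log(result.byte_coverage_ratio, result.line_coverage_ratio)
for (const sheet of result.coverage_per_stylesheet) {
  console.log(sheet.url, sheet.covered_bytes, sheet.total_bytes)
}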
package/dist/src/chunkify.d.ts
@@ -0,0 +1,9 @@
+import { Coverage } from './parse-coverage';
+export type ChunkedCoverage = Coverage & {
+chunks: {
+start_offset: number;
+end_offset: number;
+is_covered: boolean;
+}[];
+};
+export declare function chunkify_stylesheet(stylesheet: Coverage): ChunkedCoverage;
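The new chunkify_stylesheet corresponds to function L in the bundle diff above: covered ranges become covered chunks, and the gaps between them (plus any trailing gap) become uncovered chunks. A rough re-implementation, with descriptive names of my own choosing:

type Range = { start: number; end: number }
type Chunk = { start_offset: number; end_offset: number; is_covered: boolean }

// Gaps between covered ranges, and the tail after the last range, become uncovered chunks
function chunkify(text: string, ranges: Range[]): Chunk[] {
  const chunks: Chunk[] = []
  let cursor = 0
  for (const range of ranges) {
    if (cursor !== range.start) {
      chunks.push({ start_offset: cursor, end_offset: range.start, is_covered: false })
    }
    chunks.push({ start_offset: range.start, end_offset: range.end, is_covered: true })
    cursor = range.end
  }
  if (cursor !== text.length) {
    chunks.push({ start_offset: cursor, end_offset: text.length, is_covered: false })
  }
  return chunks
}

The real function returns the whole Coverage object with the chunks array attached, matching the ChunkedCoverage type above.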
package/dist/src/chunkify.test.d.ts
@@ -0,0 +1 @@
+export {};
package/dist/src/decuplicate.d.ts CHANGED
@@ -6,4 +6,4 @@ import { Coverage } from './parse-coverage.ts';
 * - if a duplicate stylesheet enters the room, we add it's ranges to the existing stylesheet's ranges
 * - only bytes of deduplicated stylesheets are counted
 */
-export declare function deduplicate_entries(entries: Coverage[]):
+export declare function deduplicate_entries(entries: Coverage[]): Coverage[];
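The return type is now an explicit Coverage[]. In the bundle this is the internal function k (it is not re-exported): stylesheets are keyed by their full text, and when a duplicate comes in its ranges are merged into the existing entry, skipping ranges that are already present. A small sketch of that rule, with my own names:

type Coverage = { url: string; text: string; ranges: { start: number; end: number }[] }

// Entries with identical text collapse into one; ranges are unioned without duplicates
function deduplicate(entries: Coverage[]): Coverage[] {
  const by_text = new Map<string, Coverage>()
  for (const entry of entries) {
    const existing = by_text.get(entry.text)
    if (!existing) {
      by_text.set(entry.text, { ...entry, ranges: [...entry.ranges] })
      continue
    }
    for (const range of entry.ranges) {
      const seen = existing.ranges.some((r) => r.start === range.start && r.end === range.end)
      if (!seen) existing.ranges.push(range)
    }
  }
  return Array.from(by_text.values())
}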
package/dist/src/extend-ranges.test.d.ts
@@ -0,0 +1 @@
+export {};
package/dist/src/index.d.ts CHANGED
@@ -1,8 +1,9 @@
-import { Coverage
+import { Coverage } from './parse-coverage.ts';
 import { Parser } from './types.ts';
+import { PrettifiedChunk } from './prettify.ts';
 export type CoverageData = {
-
-
+uncovered_bytes: number;
+covered_bytes: number;
 total_bytes: number;
 line_coverage_ratio: number;
 byte_coverage_ratio: number;
@@ -13,14 +14,7 @@ export type CoverageData = {
 export type StylesheetCoverage = CoverageData & {
 url: string;
 text: string;
-
-line_coverage: Uint8Array;
-chunks: {
-is_covered: boolean;
-start_line: number;
-end_line: number;
-total_lines: number;
-}[];
+chunks: PrettifiedChunk[];
 };
 export type CoverageResult = CoverageData & {
 total_files_found: number;
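StylesheetCoverage drops the line_coverage Uint8Array and its inline chunk shape in favour of PrettifiedChunk[] (declared in prettify.d.ts below). A consumption sketch; importing CoverageResult from the package root and the typing of coverage_per_stylesheet as StylesheetCoverage[] are assumptions, since this diff cuts that part of the type off:

import type { CoverageResult } from '@projectwallace/css-code-coverage'

// List the unused line ranges per stylesheet
function report_unused(result: CoverageResult): void {
  for (const sheet of result.coverage_per_stylesheet) {
    for (const chunk of sheet.chunks) {
      if (!chunk.is_covered) {
        console.log(`${sheet.url}: lines ${chunk.start_line}-${chunk.end_line} unused`)
      }
    }
  }
}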
package/dist/src/prettify.d.ts CHANGED
@@ -1,2 +1,12 @@
-import { Coverage } from './parse-coverage
-
+import { Coverage } from './parse-coverage';
+import { ChunkedCoverage } from './chunkify';
+export type PrettifiedChunk = ChunkedCoverage['chunks'][0] & {
+start_line: number;
+end_line: number;
+total_lines: number;
+css: string;
+};
+export type PrettifiedCoverage = Omit<Coverage, 'ranges'> & {
+chunks: PrettifiedChunk[];
+};
+export declare function prettify(stylesheet: ChunkedCoverage): PrettifiedCoverage;
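prettify() is where start_line, end_line, total_lines and the formatted css come from: each chunk's slice of the stylesheet text is run through @projectwallace/format-css, and line numbers accumulate over the formatted output (function S in the bundle also pads covered chunks with newlines, omitted here). A simplified sketch with my own names:

import { format } from '@projectwallace/format-css'

type Chunk = { start_offset: number; end_offset: number; is_covered: boolean }

// Format each chunk and assign line numbers from the formatted output's line count
function prettify_chunks(text: string, chunks: Chunk[]) {
  let line = 1
  return chunks.map((chunk) => {
    const css = format(text.slice(chunk.start_offset, chunk.end_offset))
    const total_lines = css.split('\n').length
    const prettified = {
      ...chunk,
      css,
      start_line: line,
      end_line: line + total_lines - 1,
      total_lines,
    }
    line += total_lines
    return prettified
  })
}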
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@projectwallace/css-code-coverage",
-"version": "0.
+"version": "0.3.0",
 "description": "",
 "author": "Bart Veneman <bart@projectwallace.com>",
 "repository": {
@@ -52,7 +52,6 @@
 },
 "dependencies": {
 "@projectwallace/format-css": "^2.1.1",
-"css-tree": "^3.1.0",
 "valibot": "^1.1.0"
 }
 }