@projectwallace/css-code-coverage 0.3.1 → 0.4.0

This diff shows the content of publicly available package versions as released to their respective public registries, and is provided for informational purposes only.
package/dist/index.d.ts CHANGED
@@ -1,2 +1,78 @@
1
- export * from './src/index'
2
- export {}
1
+ import * as v from "valibot";
2
+
3
+ //#region src/lib/parse-coverage.d.ts
4
+ declare let RangeSchema: v.ObjectSchema<{
5
+ readonly start: v.NumberSchema<undefined>;
6
+ readonly end: v.NumberSchema<undefined>;
7
+ }, undefined>;
8
+ type Range = v.InferInput<typeof RangeSchema>;
9
+ declare let CoverageSchema: v.ObjectSchema<{
10
+ readonly text: v.StringSchema<undefined>;
11
+ readonly url: v.StringSchema<undefined>;
12
+ readonly ranges: v.ArraySchema<v.ObjectSchema<{
13
+ readonly start: v.NumberSchema<undefined>;
14
+ readonly end: v.NumberSchema<undefined>;
15
+ }, undefined>, undefined>;
16
+ }, undefined>;
17
+ type Coverage = v.InferInput<typeof CoverageSchema>;
18
+ declare function parse_coverage(input: string): {
19
+ text: string;
20
+ url: string;
21
+ ranges: {
22
+ start: number;
23
+ end: number;
24
+ }[];
25
+ }[];
26
+ //#endregion
27
+ //#region src/lib/types.d.ts
28
+ type NodeList = Iterable<{
29
+ textContent: string;
30
+ }> | NodeListOf<HTMLStyleElement>;
31
+ interface Parser {
32
+ (html: string): {
33
+ querySelectorAll: (selector: string) => NodeList;
34
+ };
35
+ }
36
+ //#endregion
37
+ //#region src/lib/index.d.ts
38
+ type CoverageData = {
39
+ unused_bytes: number;
40
+ used_bytes: number;
41
+ total_bytes: number;
42
+ line_coverage_ratio: number;
43
+ byte_coverage_ratio: number;
44
+ total_lines: number;
45
+ covered_lines: number;
46
+ uncovered_lines: number;
47
+ };
48
+ type StylesheetCoverage = CoverageData & {
49
+ url: string;
50
+ text: string;
51
+ ranges: Range[];
52
+ line_coverage: Uint8Array;
53
+ chunks: {
54
+ is_covered: boolean;
55
+ start_line: number;
56
+ end_line: number;
57
+ total_lines: number;
58
+ }[];
59
+ };
60
+ type CoverageResult = CoverageData & {
61
+ total_files_found: number;
62
+ total_stylesheets: number;
63
+ coverage_per_stylesheet: StylesheetCoverage[];
64
+ };
65
+ /**
66
+ * @description
67
+ * CSS Code Coverage calculation
68
+ *
69
+ * These are the steps performed to calculate coverage:
70
+ * 1. Filter eligible files / validate input
71
+ * 2. Prettify the CSS discovered in each Coverage and update their ranges
72
+ * 3. De-duplicate Coverages: merge all ranges for CSS sources occurring multiple times
73
+ * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
74
+ * 5. Calculate line-coverage, byte-coverage per stylesheet
75
+ */
76
+ declare function calculate_coverage(coverage: Coverage[], parse_html?: Parser): CoverageResult;
77
+ //#endregion
78
+ export { type Coverage, CoverageData, CoverageResult, type Parser, type Range, StylesheetCoverage, calculate_coverage, parse_coverage };
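
A usage sketch of the public API described by these declarations, assuming a coverage JSON file exported from Chrome DevTools' Coverage panel (the file name and the Node file read are illustrative, not part of the package):

import { readFileSync } from 'node:fs'
import { calculate_coverage, parse_coverage } from '@projectwallace/css-code-coverage'

// Hypothetical input: a coverage export recorded for a page.
const raw = readFileSync('./coverage.json', 'utf-8')

// parse_coverage validates the JSON against the Coverage schema and
// returns an empty array when the input is malformed or does not match.
const coverage = parse_coverage(raw)

// Without the optional Parser argument, HTML entries (inline <style>) are skipped.
const result = calculate_coverage(coverage)

console.log(result.total_stylesheets)
console.log(result.byte_coverage_ratio) // used_bytes / total_bytes
console.log(result.line_coverage_ratio) // covered_lines / total_lines
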
package/dist/index.js ADDED
@@ -0,0 +1,325 @@
1
+ import * as v from "valibot";
2
+ import { format } from "@projectwallace/format-css";
3
+ import { tokenTypes, tokenize } from "css-tree/tokenizer";
4
+
5
+ //#region src/lib/parse-coverage.ts
6
+ let RangeSchema = v.object({
7
+ start: v.number(),
8
+ end: v.number()
9
+ });
10
+ let CoverageSchema = v.object({
11
+ text: v.string(),
12
+ url: v.string(),
13
+ ranges: v.array(RangeSchema)
14
+ });
15
+ function is_valid_coverage(input) {
16
+ return v.safeParse(v.array(CoverageSchema), input).success;
17
+ }
18
+ function parse_coverage(input) {
19
+ try {
20
+ let parse_result = JSON.parse(input);
21
+ return is_valid_coverage(parse_result) ? parse_result : [];
22
+ } catch {
23
+ return [];
24
+ }
25
+ }
26
+
27
+ //#endregion
28
+ //#region src/lib/prettify.ts
29
+ let irrelevant_tokens = new Set([
30
+ tokenTypes.EOF,
31
+ tokenTypes.BadString,
32
+ tokenTypes.BadUrl,
33
+ tokenTypes.WhiteSpace,
34
+ tokenTypes.Semicolon,
35
+ tokenTypes.Comment,
36
+ tokenTypes.Colon
37
+ ]);
38
+ function prettify(coverage) {
39
+ return coverage.map(({ url, text, ranges }) => {
40
+ let formatted = format(text);
41
+ let ext_ranges = ranges.map(({ start, end }) => ({
42
+ start,
43
+ end,
44
+ tokens: []
45
+ }));
46
+ function is_in_range(start, end) {
47
+ let range_index = 0;
48
+ for (let range of ext_ranges) {
49
+ if (range.start > end) return -1;
50
+ if (range.start <= start && range.end >= end) return range_index;
51
+ range_index++;
52
+ }
53
+ return -1;
54
+ }
55
+ let index = 0;
56
+ tokenize(text, (type, start, end) => {
57
+ if (irrelevant_tokens.has(type)) return;
58
+ index++;
59
+ let range_index = is_in_range(start, end);
60
+ if (range_index !== -1) ext_ranges[range_index].tokens.push(index);
61
+ });
62
+ let new_tokens = /* @__PURE__ */ new Map();
63
+ index = 0;
64
+ tokenize(formatted, (type, start, end) => {
65
+ if (irrelevant_tokens.has(type)) return;
66
+ index++;
67
+ new_tokens.set(index, {
68
+ start,
69
+ end
70
+ });
71
+ });
72
+ let new_ranges = [];
73
+ for (let range of ext_ranges) {
74
+ let start_token = new_tokens.get(range.tokens.at(0));
75
+ let end_token = new_tokens.get(range.tokens.at(-1));
76
+ if (start_token !== void 0 && end_token !== void 0) new_ranges.push({
77
+ start: start_token.start,
78
+ end: end_token.end
79
+ });
80
+ }
81
+ return {
82
+ url,
83
+ text: formatted,
84
+ ranges: new_ranges
85
+ };
86
+ });
87
+ }
88
+
89
+ //#endregion
90
+ //#region src/lib/decuplicate.ts
91
+ /**
92
+ * @description
93
+ * prerequisites
94
+ * - we check each stylesheet content only once (to avoid counting the same content multiple times)
95
+ * - if a duplicate stylesheet enters the room, we add its ranges to the existing stylesheet's ranges
96
+ * - only bytes of deduplicated stylesheets are counted
97
+ */
98
+ function deduplicate_entries(entries) {
99
+ let checked_stylesheets = /* @__PURE__ */ new Map();
100
+ for (let entry of entries) {
101
+ let text = entry.text;
102
+ if (checked_stylesheets.has(text)) {
103
+ let ranges = checked_stylesheets.get(text).ranges;
104
+ for (let range of entry.ranges) {
105
+ let found = false;
106
+ for (let checked_range of ranges) if (checked_range.start === range.start && checked_range.end === range.end) {
107
+ found = true;
108
+ break;
109
+ }
110
+ if (!found) ranges.push(range);
111
+ }
112
+ } else checked_stylesheets.set(text, {
113
+ url: entry.url,
114
+ ranges: entry.ranges
115
+ });
116
+ }
117
+ return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({
118
+ text,
119
+ url,
120
+ ranges
121
+ }));
122
+ }
123
+
124
+ //#endregion
125
+ //#region src/lib/ext.ts
126
+ function ext(url) {
127
+ try {
128
+ let parsed_url = new URL(url);
129
+ return parsed_url.pathname.slice(parsed_url.pathname.lastIndexOf(".") + 1);
130
+ } catch {
131
+ let ext_index = url.lastIndexOf(".");
132
+ return url.slice(ext_index, url.indexOf("/", ext_index) + 1);
133
+ }
134
+ }
135
+
136
+ //#endregion
137
+ //#region src/lib/remap-html.ts
138
+ function remap_html(parse_html, html, old_ranges) {
139
+ let doc = parse_html(html);
140
+ let combined_css = "";
141
+ let new_ranges = [];
142
+ let current_offset = 0;
143
+ let style_elements = doc.querySelectorAll("style");
144
+ for (let style_element of Array.from(style_elements)) {
145
+ let style_content = style_element.textContent;
146
+ if (!style_content.trim()) continue;
147
+ combined_css += style_content;
148
+ let start_index = html.indexOf(style_content);
149
+ let end_index = start_index + style_content.length;
150
+ for (let range of old_ranges) if (range.start >= start_index && range.end <= end_index) new_ranges.push({
151
+ start: current_offset + (range.start - start_index),
152
+ end: current_offset + (range.end - start_index)
153
+ });
154
+ current_offset += style_content.length;
155
+ }
156
+ return {
157
+ css: combined_css,
158
+ ranges: new_ranges
159
+ };
160
+ }
161
+
162
+ //#endregion
163
+ //#region src/lib/filter-entries.ts
164
+ function is_html(text) {
165
+ return /<\/?(html|body|head|div|span|script|style)/i.test(text);
166
+ }
167
+ function filter_coverage(coverage, parse_html) {
168
+ let result = [];
169
+ for (let entry of coverage) {
170
+ let extension = ext(entry.url).toLowerCase();
171
+ if (extension === "js") continue;
172
+ if (extension === "css") {
173
+ result.push(entry);
174
+ continue;
175
+ }
176
+ if (is_html(entry.text)) {
177
+ if (!parse_html) continue;
178
+ let { css, ranges } = remap_html(parse_html, entry.text, entry.ranges);
179
+ result.push({
180
+ url: entry.url,
181
+ text: css,
182
+ ranges
183
+ });
184
+ continue;
185
+ }
186
+ result.push({
187
+ url: entry.url,
188
+ text: entry.text,
189
+ ranges: entry.ranges
190
+ });
191
+ }
192
+ return result;
193
+ }
194
+
195
+ //#endregion
196
+ //#region src/lib/index.ts
197
+ function ratio(fraction, total) {
198
+ if (total === 0) return 0;
199
+ return fraction / total;
200
+ }
201
+ /**
202
+ * @description
203
+ * CSS Code Coverage calculation
204
+ *
205
+ * These are the steps performed to calculate coverage:
206
+ * 1. Filter eligible files / validate input
207
+ * 2. Prettify the CSS discovered in each Coverage and update their ranges
208
+ * 3. De-duplicate Coverages: merge all ranges for CSS sources occurring multiple times
209
+ * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
210
+ * 5. Calculate line-coverage, byte-coverage per stylesheet
211
+ */
212
+ function calculate_coverage(coverage, parse_html) {
213
+ let total_files_found = coverage.length;
214
+ if (!is_valid_coverage(coverage)) throw new TypeError("No valid coverage data found");
215
+ let coverage_per_stylesheet = deduplicate_entries(prettify(filter_coverage(coverage, parse_html))).map(({ text, url, ranges }) => {
216
+ function is_line_covered(line, start_offset) {
217
+ let end = start_offset + line.length;
218
+ let next_offset = end + 1;
219
+ let is_empty = /^\s*$/.test(line);
220
+ let is_closing_brace = line.endsWith("}");
221
+ if (!is_empty && !is_closing_brace) for (let range of ranges) {
222
+ if (range.start > end || range.end < start_offset) continue;
223
+ if (range.start <= start_offset && range.end >= end) return true;
224
+ else if (line.startsWith("@") && range.start > start_offset && range.start < next_offset) return true;
225
+ }
226
+ return false;
227
+ }
228
+ let lines = text.split("\n");
229
+ let total_file_lines = lines.length;
230
+ let line_coverage = new Uint8Array(total_file_lines);
231
+ let file_lines_covered = 0;
232
+ let file_total_bytes = text.length;
233
+ let file_bytes_covered = 0;
234
+ let offset = 0;
235
+ for (let index = 0; index < lines.length; index++) {
236
+ let line = lines[index];
237
+ let start = offset;
238
+ let next_offset = offset + line.length + 1;
239
+ let is_empty = /^\s*$/.test(line);
240
+ let is_closing_brace = line.endsWith("}");
241
+ let is_in_range = is_line_covered(line, start);
242
+ let is_covered = false;
243
+ let prev_is_covered = index > 0 && line_coverage[index - 1] === 1;
244
+ if (is_in_range && !is_closing_brace && !is_empty) is_covered = true;
245
+ else if ((is_empty || is_closing_brace) && prev_is_covered) is_covered = true;
246
+ else if (is_empty && !prev_is_covered && is_line_covered(lines[index + 1], next_offset)) is_covered = true;
247
+ line_coverage[index] = is_covered ? 1 : 0;
248
+ if (is_covered) {
249
+ file_lines_covered++;
250
+ file_bytes_covered += line.length + 1;
251
+ }
252
+ offset = next_offset;
253
+ }
254
+ let chunks = [{
255
+ start_line: 1,
256
+ is_covered: line_coverage[0] === 1,
257
+ end_line: 1,
258
+ total_lines: 1
259
+ }];
260
+ for (let index = 1; index < line_coverage.length; index++) {
261
+ let is_covered = line_coverage.at(index);
262
+ if (is_covered !== line_coverage.at(index - 1)) {
263
+ let last_chunk$1 = chunks.at(-1);
264
+ last_chunk$1.end_line = index;
265
+ last_chunk$1.total_lines = index - last_chunk$1.start_line + 1;
266
+ chunks.push({
267
+ start_line: index + 1,
268
+ is_covered: is_covered === 1,
269
+ end_line: index,
270
+ total_lines: 0
271
+ });
272
+ }
273
+ }
274
+ let last_chunk = chunks.at(-1);
275
+ last_chunk.total_lines = line_coverage.length + 1 - last_chunk.start_line;
276
+ last_chunk.end_line = line_coverage.length;
277
+ return {
278
+ url,
279
+ text,
280
+ ranges,
281
+ unused_bytes: file_total_bytes - file_bytes_covered,
282
+ used_bytes: file_bytes_covered,
283
+ total_bytes: file_total_bytes,
284
+ line_coverage_ratio: ratio(file_lines_covered, total_file_lines),
285
+ byte_coverage_ratio: ratio(file_bytes_covered, file_total_bytes),
286
+ line_coverage,
287
+ total_lines: total_file_lines,
288
+ covered_lines: file_lines_covered,
289
+ uncovered_lines: total_file_lines - file_lines_covered,
290
+ chunks
291
+ };
292
+ });
293
+ let { total_lines, total_covered_lines, total_uncovered_lines, total_bytes, total_used_bytes, total_unused_bytes } = coverage_per_stylesheet.reduce((totals, sheet) => {
294
+ totals.total_lines += sheet.total_lines;
295
+ totals.total_covered_lines += sheet.covered_lines;
296
+ totals.total_uncovered_lines += sheet.uncovered_lines;
297
+ totals.total_bytes += sheet.total_bytes;
298
+ totals.total_used_bytes += sheet.used_bytes;
299
+ totals.total_unused_bytes += sheet.unused_bytes;
300
+ return totals;
301
+ }, {
302
+ total_lines: 0,
303
+ total_covered_lines: 0,
304
+ total_uncovered_lines: 0,
305
+ total_bytes: 0,
306
+ total_used_bytes: 0,
307
+ total_unused_bytes: 0
308
+ });
309
+ return {
310
+ total_files_found,
311
+ total_bytes,
312
+ total_lines,
313
+ used_bytes: total_used_bytes,
314
+ covered_lines: total_covered_lines,
315
+ unused_bytes: total_unused_bytes,
316
+ uncovered_lines: total_uncovered_lines,
317
+ byte_coverage_ratio: ratio(total_used_bytes, total_bytes),
318
+ line_coverage_ratio: ratio(total_covered_lines, total_lines),
319
+ coverage_per_stylesheet,
320
+ total_stylesheets: coverage_per_stylesheet.length
321
+ };
322
+ }
323
+
324
+ //#endregion
325
+ export { calculate_coverage, parse_coverage };
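
The HTML branch in filter_coverage/remap_html above only runs when a Parser is supplied; the Parser contract is just a function from an HTML string to something exposing querySelectorAll('style'). A sketch of one way to satisfy it with linkedom (listed in the package's devDependencies) — the wrapper is an assumption, not part of the published API:

import { parseHTML } from 'linkedom'
import { calculate_coverage, parse_coverage, type Parser } from '@projectwallace/css-code-coverage'

// linkedom's parseHTML returns { document, window, ... }; the document object
// exposes querySelectorAll('style'), which is all the Parser contract needs.
// (A cast may be required in strict mode if textContent is typed as nullable.)
const parse_html: Parser = (html) => parseHTML(html).document

const raw = '[]' // placeholder: a coverage JSON string recorded for a page
const result = calculate_coverage(parse_coverage(raw), parse_html)
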
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@projectwallace/css-code-coverage",
3
- "version": "0.3.1",
3
+ "version": "0.4.0",
4
4
  "description": "",
5
5
  "author": "Bart Veneman <bart@projectwallace.com>",
6
6
  "repository": {
@@ -21,34 +21,37 @@
21
21
  ],
22
22
  "license": "EUPL-1.2",
23
23
  "engines": {
24
- "node": ">=18.0.0"
24
+ "node": ">=22.18.0"
25
25
  },
26
26
  "type": "module",
27
27
  "files": [
28
28
  "dist"
29
29
  ],
30
- "main": "dist/css-code-coverage.js",
30
+ "bin": {
31
+ "css-coverage": "dist/cli.js"
32
+ },
33
+ "main": "dist/index.js",
31
34
  "exports": {
32
35
  "types": "./dist/index.d.ts",
33
- "default": "./dist/css-code-coverage.js"
36
+ "default": "./dist/index.js"
34
37
  },
35
38
  "types": "dist/index.d.ts",
36
39
  "scripts": {
37
40
  "test": "c8 --reporter=text playwright test",
38
- "build": "vite build",
41
+ "build": "tsdown",
39
42
  "check": "tsc --noEmit",
40
43
  "lint": "oxlint --config .oxlintrc.json",
41
44
  "lint-package": "publint"
42
45
  },
43
46
  "devDependencies": {
44
47
  "@playwright/test": "^1.56.0",
48
+ "@types/node": "^24.8.1",
45
49
  "c8": "^10.1.3",
46
50
  "linkedom": "^0.18.12",
47
51
  "oxlint": "^1.22.0",
48
52
  "publint": "^0.3.14",
49
- "typescript": "^5.9.3",
50
- "vite": "^7.1.9",
51
- "vite-plugin-dts": "^4.5.4"
53
+ "tsdown": "^0.15.8",
54
+ "typescript": "^5.9.3"
52
55
  },
53
56
  "dependencies": {
54
57
  "@projectwallace/format-css": "^2.1.1",
package/dist/css-code-coverage.js REMOVED
@@ -1,247 +0,0 @@
1
- import * as h from "valibot";
2
- import { format as F } from "@projectwallace/format-css";
3
- import { tokenTypes as x, tokenize as $ } from "css-tree/tokenizer";
4
- let J = h.array(
5
- h.object({
6
- text: h.optional(h.string()),
7
- url: h.string(),
8
- ranges: h.array(
9
- h.object({
10
- start: h.number(),
11
- end: h.number()
12
- })
13
- )
14
- })
15
- );
16
- function q(t) {
17
- return h.safeParse(J, t).success;
18
- }
19
- function X(t) {
20
- try {
21
- let e = JSON.parse(t);
22
- return q(e) ? e : [];
23
- } catch {
24
- return [];
25
- }
26
- }
27
- function P(t) {
28
- return t.map(({ url: e, text: s, ranges: n }) => {
29
- if (!s)
30
- return { url: e, text: s, ranges: n };
31
- let c = F(s), f = /* @__PURE__ */ new Set([
32
- x.EOF,
33
- x.BadString,
34
- x.BadUrl,
35
- x.WhiteSpace,
36
- x.Semicolon,
37
- x.Comment,
38
- x.Colon
39
- ]), o = n.map(({ start: a, end: i }) => ({ start: a, end: i, tokens: [] }));
40
- function v(a, i) {
41
- let r = 0;
42
- for (let u of o) {
43
- if (u.start > i) return -1;
44
- if (u.start <= a && u.end >= i)
45
- return r;
46
- r++;
47
- }
48
- return -1;
49
- }
50
- let _ = 0;
51
- $(s, (a, i, r) => {
52
- if (f.has(a)) return;
53
- _++, a === x.Url && (_ += 2);
54
- let u = v(i, r);
55
- u !== -1 && o[u].tokens.push(_);
56
- });
57
- let g = /* @__PURE__ */ new Map();
58
- _ = 0, $(c, (a, i, r) => {
59
- f.has(a) || (_++, a === x.Url && (_ += 2), g.set(_, { start: i, end: r }));
60
- });
61
- let y = [];
62
- for (let a of o) {
63
- let i = g.get(a.tokens.at(0)), r = g.get(a.tokens.at(-1));
64
- i !== void 0 && r !== void 0 && y.push({
65
- start: i.start,
66
- end: r.end
67
- });
68
- }
69
- return { url: e, text: c, ranges: y };
70
- });
71
- }
72
- function R(t) {
73
- let e = /* @__PURE__ */ new Map();
74
- for (let s of t) {
75
- let n = s.text || "";
76
- if (e.has(n)) {
77
- let f = e.get(n).ranges;
78
- for (let o of s.ranges) {
79
- let v = !1;
80
- for (let _ of f)
81
- if (_.start === o.start && _.end === o.end) {
82
- v = !0;
83
- break;
84
- }
85
- v || f.push(o);
86
- }
87
- } else
88
- e.set(n, {
89
- url: s.url,
90
- ranges: s.ranges
91
- });
92
- }
93
- return e;
94
- }
95
- function D(t) {
96
- try {
97
- let e = new URL(t);
98
- return e.pathname.slice(e.pathname.lastIndexOf(".") + 1);
99
- } catch {
100
- let e = t.lastIndexOf(".");
101
- return t.slice(e, t.indexOf("/", e) + 1);
102
- }
103
- }
104
- function G(t, e, s) {
105
- let n = t(e), c = "", f = [], o = 0, v = n.querySelectorAll("style");
106
- for (let _ of Array.from(v)) {
107
- let g = _.textContent;
108
- if (!g.trim()) continue;
109
- c += g;
110
- let y = e.indexOf(g), a = y + g.length;
111
- for (let i of s)
112
- i.start >= y && i.end <= a && f.push({
113
- start: o + (i.start - y),
114
- end: o + (i.end - y)
115
- });
116
- o += g.length;
117
- }
118
- return {
119
- css: c,
120
- ranges: f
121
- };
122
- }
123
- function H(t) {
124
- return /<\/?(html|body|head|div|span|script|style)/i.test(t);
125
- }
126
- function K(t, e) {
127
- let s = [];
128
- for (let n of t) {
129
- if (!n.text) continue;
130
- let c = D(n.url).toLowerCase();
131
- if (c !== "js") {
132
- if (c === "css") {
133
- s.push(n);
134
- continue;
135
- }
136
- if (H(n.text)) {
137
- if (!e)
138
- continue;
139
- let { css: f, ranges: o } = G(e, n.text, n.ranges);
140
- s.push({
141
- url: n.url,
142
- text: f,
143
- ranges: o
144
- });
145
- continue;
146
- }
147
- s.push({
148
- url: n.url,
149
- text: n.text,
150
- ranges: n.ranges
151
- });
152
- }
153
- }
154
- return s;
155
- }
156
- function j(t, e) {
157
- return e === 0 ? 0 : t / e;
158
- }
159
- function Y(t, e) {
160
- let s = t.length;
161
- if (!q(t))
162
- throw new TypeError("No valid coverage data found");
163
- let n = K(t, e), c = P(n), f = R(c), o = Array.from(f).map(([r, { url: u, ranges: L }]) => {
164
- function M(l, d) {
165
- let b = d + l.length, N = b + 1, W = /^\s*$/.test(l), k = l.endsWith("}");
166
- if (!W && !k) {
167
- for (let m of L)
168
- if (!(m.start > b || m.end < d)) {
169
- if (m.start <= d && m.end >= b)
170
- return !0;
171
- if (l.startsWith("@") && m.start > d && m.start < N)
172
- return !0;
173
- }
174
- }
175
- return !1;
176
- }
177
- let S = r.split(`
178
- `), O = S.length, p = new Uint8Array(O), C = 0, B = r.length, U = 0, E = 0;
179
- for (let l = 0; l < S.length; l++) {
180
- let d = S[l], b = E, W = E + d.length + 1, k = /^\s*$/.test(d), m = d.endsWith("}"), z = M(d, b), w = !1, T = l > 0 && p[l - 1] === 1;
181
- (z && !m && !k || (k || m) && T || k && !T && M(S[l + 1], W)) && (w = !0), p[l] = w ? 1 : 0, w && (C++, U += d.length + 1), E = W;
182
- }
183
- let A = [
184
- {
185
- start_line: 1,
186
- is_covered: p[0] === 1,
187
- end_line: 1,
188
- total_lines: 1
189
- }
190
- ];
191
- for (let l = 1; l < p.length; l++) {
192
- let d = p[l];
193
- if (d !== p[l - 1]) {
194
- let b = A.at(-1);
195
- b.end_line = l, b.total_lines = l - b.start_line + 1, A.push({
196
- start_line: l + 1,
197
- is_covered: d === 1,
198
- end_line: l,
199
- total_lines: 0
200
- });
201
- }
202
- }
203
- let I = A.at(-1);
204
- return I.total_lines = p.length + 1 - I.start_line, I.end_line = p.length, {
205
- url: u,
206
- text: r,
207
- ranges: L,
208
- unused_bytes: B - U,
209
- used_bytes: U,
210
- total_bytes: B,
211
- line_coverage_ratio: j(C, O),
212
- byte_coverage_ratio: j(U, B),
213
- line_coverage: p,
214
- total_lines: O,
215
- covered_lines: C,
216
- uncovered_lines: O - C,
217
- chunks: A
218
- };
219
- }), { total_lines: v, total_covered_lines: _, total_uncovered_lines: g, total_bytes: y, total_used_bytes: a, total_unused_bytes: i } = o.reduce(
220
- (r, u) => (r.total_lines += u.total_lines, r.total_covered_lines += u.covered_lines, r.total_uncovered_lines += u.uncovered_lines, r.total_bytes += u.total_bytes, r.total_used_bytes += u.used_bytes, r.total_unused_bytes += u.unused_bytes, r),
221
- {
222
- total_lines: 0,
223
- total_covered_lines: 0,
224
- total_uncovered_lines: 0,
225
- total_bytes: 0,
226
- total_used_bytes: 0,
227
- total_unused_bytes: 0
228
- }
229
- );
230
- return {
231
- total_files_found: s,
232
- total_bytes: y,
233
- total_lines: v,
234
- used_bytes: a,
235
- covered_lines: _,
236
- unused_bytes: i,
237
- uncovered_lines: g,
238
- byte_coverage_ratio: j(a, y),
239
- line_coverage_ratio: j(_, v),
240
- coverage_per_stylesheet: o,
241
- total_stylesheets: o.length
242
- };
243
- }
244
- export {
245
- Y as calculate_coverage,
246
- X as parse_coverage
247
- };
@@ -1,9 +0,0 @@
1
- import { Coverage } from './parse-coverage.ts';
2
- /**
3
- * @description
4
- * prerequisites
5
- * - we check each stylesheet content only once (to avoid counting the same content multiple times)
6
- * - if a duplicate stylesheet enters the room, we add it's ranges to the existing stylesheet's ranges
7
- * - only bytes of deduplicated stylesheets are counted
8
- */
9
- export declare function deduplicate_entries(entries: Coverage[]): Map<NonNullable<Coverage['text']>, Pick<Coverage, 'ranges' | 'url'>>;