@projectwallace/css-code-coverage 0.5.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,5 @@
1
1
  import * as v from "valibot";
2
2
  import { format } from "@projectwallace/format-css";
3
- import { tokenTypes, tokenize } from "css-tree/tokenizer";
4
3
 
5
4
  //#region src/lib/parse-coverage.ts
6
5
  let RangeSchema = v.object({
@@ -25,69 +24,131 @@ function parse_coverage(input) {
25
24
  }
26
25
 
27
26
  //#endregion
28
- //#region src/lib/prettify.ts
29
- let irrelevant_tokens = new Set([
30
- tokenTypes.EOF,
31
- tokenTypes.BadString,
32
- tokenTypes.BadUrl,
33
- tokenTypes.WhiteSpace,
34
- tokenTypes.Semicolon,
35
- tokenTypes.Comment,
36
- tokenTypes.Colon
37
- ]);
38
- function prettify(coverage) {
39
- return coverage.map(({ url, text, ranges }) => {
40
- let formatted = format(text);
41
- let ext_ranges = ranges.map(({ start, end }) => ({
42
- start,
43
- end,
44
- tokens: []
45
- }));
46
- function is_in_range(start, end) {
47
- let range_index = 0;
48
- for (let range of ext_ranges) {
49
- if (range.start > end) return -1;
50
- if (range.start <= start && range.end >= end) return range_index;
51
- range_index++;
27
//#region src/lib/chunkify.ts
/**
 * Collapse a stylesheet's chunk list:
 * - drops chunks whose text is whitespace-only
 * - merges consecutive chunks that share the same `is_covered` state
 * - absorbs empty (zero-length) chunks into the previous kept chunk
 * @param {Object} stylesheet - { url, text, chunks }
 * @returns {Object} same shape with a minimal `chunks` array
 */
function merge(stylesheet) {
	let new_chunks = [];
	let previous_chunk;
	for (let i = 0; i < stylesheet.chunks.length; i++) {
		let chunk = stylesheet.chunks.at(i);
		// Whitespace-only chunks carry no coverage information; skip them entirely
		if (/^\s+$/.test(stylesheet.text.slice(chunk.start_offset, chunk.end_offset))) continue;
		let latest_chunk = new_chunks.at(-1);
		if (i > 0 && previous_chunk && latest_chunk) {
			if (previous_chunk.is_covered === chunk.is_covered) {
				// Same coverage state as the previous kept chunk: extend that chunk
				latest_chunk.end_offset = chunk.end_offset;
				previous_chunk = chunk;
				continue;
			} else if (chunk.end_offset === chunk.start_offset) {
				// Empty chunk: absorb it into the previous kept chunk.
				// (The original also re-tested the slice for whitespace here, but any
				// whitespace-only chunk was already skipped at the top of the loop.)
				latest_chunk.end_offset = chunk.end_offset;
				continue;
			}
		}
		previous_chunk = chunk;
		new_chunks.push(chunk);
	}
	return {
		...stylesheet,
		chunks: new_chunks
	};
}
/**
 * Convert coverage ranges into an exhaustive list of covered/uncovered
 * chunks spanning the whole stylesheet text, then merge adjacent chunks.
 * Assumes `ranges` is sorted by `start` and non-overlapping (guaranteed
 * upstream by deduplicate_entries).
 * FIX: the trailing uncovered chunk is now emitted whenever coverage stops
 * before `text.length`. The previous `text.length - 1` comparison dropped
 * the final character when coverage ended exactly one character early, and
 * pushed a useless empty chunk when coverage reached the very end.
 * @param {Object} stylesheet - { url, text, ranges }
 * @returns {Object} { url, text, chunks }
 */
function chunkify(stylesheet) {
	let chunks = [];
	let offset = 0;
	for (let range of stylesheet.ranges) {
		if (offset !== range.start) {
			// Gap before this covered range -> uncovered chunk
			chunks.push({
				start_offset: offset,
				end_offset: range.start,
				is_covered: false
			});
			offset = range.start;
		}
		chunks.push({
			start_offset: range.start,
			end_offset: range.end,
			is_covered: true
		});
		offset = range.end;
	}
	// Anything after the last covered range is uncovered
	if (offset !== stylesheet.text.length) chunks.push({
		start_offset: offset,
		end_offset: stylesheet.text.length,
		is_covered: false
	});
	return merge({
		url: stylesheet.url,
		text: stylesheet.text,
		chunks
	});
}
83
+
84
+ //#endregion
85
//#region src/lib/prettify.ts
/**
 * Format every chunk's CSS with @projectwallace/format-css and annotate each
 * chunk with its line/offset position inside the joined, formatted output.
 * Covered chunks get blank-line padding so they stand apart from uncovered
 * neighbours in a rendered report.
 * @param {Object} stylesheet - { url, text, chunks }
 * @returns {Object} stylesheet with pretty chunks and the re-joined `text`
 */
function prettify(stylesheet) {
	let chunk_count = stylesheet.chunks.length;
	let current_line = 1;
	let current_offset = 0;
	let pretty_chunks = stylesheet.chunks.map((chunk, index) => {
		// NOTE(review): the slice deliberately stops at `end_offset - 1`, which
		// excludes the chunk's final character before formatting — confirm this
		// matches the offset convention produced by chunkify.
		let raw = stylesheet.text.slice(chunk.start_offset, chunk.end_offset - 1);
		let css = format(raw).trim();
		if (chunk.is_covered) {
			// Blank-line padding: none at the very start/end of the file
			let prefix = index === 0 ? "" : "\n";
			let suffix = index === chunk_count - 1 ? "" : "\n";
			css = prefix + css + suffix;
		}
		let line_total = css.split("\n").length;
		let chunk_start_offset = current_offset;
		// NOTE(review): `css.length - 1` and re-using end_offset as the next
		// start make consecutive offsets overlap by one and ignore the "\n"
		// inserted by join() below — presumably intentional; verify downstream.
		let chunk_end_offset = current_offset + css.length - 1;
		let chunk_start_line = current_line;
		let next_line = current_line + line_total;
		current_line = next_line;
		current_offset = chunk_end_offset;
		return {
			...chunk,
			start_offset: chunk_start_offset,
			start_line: chunk_start_line,
			end_line: next_line - 1,
			end_offset: chunk_end_offset,
			css,
			total_lines: next_line - chunk_start_line
		};
	});
	return {
		...stylesheet,
		chunks: pretty_chunks,
		text: pretty_chunks.map((chunk) => chunk.css).join("\n")
	};
}
88
120
 
89
121
  //#endregion
90
122
  //#region src/lib/decuplicate.ts
123
/**
 * Merge sorted ranges that touch (share an endpoint) or are exactly
 * adjacent (`end === next.start - 1`) into single larger ranges.
 * Assumes `ranges` is sorted by `start`.
 * FIX: pushes copies instead of the caller's own objects, so extending a
 * merged range no longer mutates range objects owned by the caller.
 * @param {Iterable<{start: number, end: number}>} ranges
 * @returns {Array<{start: number, end: number}>} merged ranges
 */
function concatenate(ranges) {
	let result = [];
	for (let range of ranges) {
		let last = result.at(-1);
		if (last !== undefined && (last.end === range.start - 1 || last.end === range.start)) {
			// Touching/adjacent: extend our own copy
			last.end = range.end;
		} else {
			// Copy so we never mutate objects owned by the caller
			result.push({ start: range.start, end: range.end });
		}
	}
	return result;
}
129
/**
 * Remove duplicate/contained ranges and join tail-overlapping ones.
 * Assumes the input is sorted by `start` (callers sort before calling),
 * so a candidate never overlaps only the head of an existing range.
 * Note: the Set is intentionally mutated while being iterated; every
 * mutation is immediately followed by `continue outer`, which restarts
 * iteration safely for the next candidate.
 * @param {Array<{start: number, end: number}>} ranges - sorted by start
 * @returns {Set<{start: number, end: number}>} deduplicated ranges
 */
function dedupe_list(ranges) {
	let kept = new Set();
	outer: for (let candidate of ranges) {
		for (let existing of kept) {
			let engulfs = candidate.start <= existing.start && candidate.end >= existing.end;
			if (engulfs) {
				// Candidate swallows an existing range: replace it
				kept.delete(existing);
				kept.add(candidate);
				continue outer;
			}
			let contained = candidate.start >= existing.start && candidate.end <= existing.end;
			if (contained) continue outer;
			let overlaps_tail = candidate.start > existing.start && candidate.start < existing.end && candidate.end > existing.end;
			if (overlaps_tail) {
				// Partial overlap at the tail: widen the existing range
				kept.delete(existing);
				kept.add({
					start: existing.start,
					end: candidate.end
				});
				continue outer;
			}
		}
		kept.add(candidate);
	}
	return kept;
}
91
152
  /**
92
153
  * @description
93
154
  * prerequisites
@@ -117,7 +178,7 @@ function deduplicate_entries(entries) {
117
178
  return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({
118
179
  text,
119
180
  url,
120
- ranges
181
+ ranges: concatenate(dedupe_list(ranges.sort((a, b) => a.start - b.start))).sort((a, b) => a.start - b.start)
121
182
  }));
122
183
  }
123
184
 
@@ -133,17 +194,43 @@ function ext(url) {
133
194
  }
134
195
  }
135
196
 
197
+ //#endregion
198
//#region src/lib/html-parser.ts
/**
 * @description
 * Very, very naive but effective DOMParser.
 * It can only find <style> elements and their .textContent
 */
var DOMParser = class {
	parseFromString(html, _type) {
		let styles = [];
		let lower = html.toLowerCase();
		let cursor = 0;
		// Scan for every "<style" ... "</style>" pair, case-insensitively.
		// Content is sliced from the original string so casing is preserved.
		for (let open = lower.indexOf("<style", cursor); open !== -1; open = lower.indexOf("<style", cursor)) {
			let tag_end = lower.indexOf(">", open);
			if (tag_end === -1) break;
			let close = lower.indexOf("</style>", tag_end);
			if (close === -1) break;
			styles.push({ textContent: html.slice(tag_end + 1, close) });
			cursor = close + "</style>".length;
		}
		return {
			querySelectorAll(_selector) {
				// The selector is ignored: <style> elements are all we can find
				return styles;
			}
		};
	}
};

//#endregion
//#region src/lib/remap-html.ts
/**
 * Prefer the browser's native DOMParser when available; otherwise fall
 * back to the naive local implementation above.
 */
function get_dom_parser() {
	let has_native = typeof window !== "undefined" && "DOMParser" in window;
	if (has_native) return new window.DOMParser();
	return new DOMParser();
}
145
- async function remap_html(html, old_ranges) {
146
- let doc = (await get_dom_parser()).parseFromString(html, "text/html");
232
+ function remap_html(html, old_ranges) {
233
+ let doc = get_dom_parser().parseFromString(html, "text/html");
147
234
  let combined_css = "";
148
235
  let new_ranges = [];
149
236
  let current_offset = 0;
@@ -171,7 +258,20 @@ async function remap_html(html, old_ranges) {
171
258
/**
 * Heuristic: text counts as HTML when it contains any common
 * opening or closing HTML tag.
 * @param {string} text
 * @returns {boolean}
 */
function is_html(text) {
	const html_tag = /<\/?(html|body|head|div|span|script|style)/i;
	return html_tag.test(text);
}
174
- async function filter_coverage(coverage) {
261
// Matches a selector-ish token (at-rule, attribute selector, or simple
// ident/class/id) followed by an opening brace.
const SELECTOR_REGEX = /(@[a-z-]+|\[[^\]]+\]|[a-zA-Z_#.-][a-zA-Z0-9_-]*)\s*\{/;
// Matches a lone `property: value` declaration line.
const DECLARATION_REGEX = /^\s*[a-zA-Z-]+\s*:\s*.+;?\s*$/m;
/**
 * Heuristic: text looks like CSS when it contains either a selector
 * followed by `{` or a declaration-shaped line.
 * @param {string} text
 * @returns {boolean}
 */
function is_css_like(text) {
	if (SELECTOR_REGEX.test(text)) return true;
	return DECLARATION_REGEX.test(text);
}
266
/**
 * Heuristic: text is JavaScript-like when it parses as a function body.
 * `new Function` only COMPILES the source here — it is never invoked,
 * so no untrusted code is executed by this check.
 * @param {string} text
 * @returns {boolean}
 */
function is_js_like(text) {
	try {
		new Function(text);
	} catch {
		// Syntax error: not valid JavaScript
		return false;
	}
	return true;
}
274
+ function filter_coverage(coverage) {
175
275
  let result = [];
176
276
  for (let entry of coverage) {
177
277
  let extension = ext(entry.url).toLowerCase();
@@ -181,7 +281,7 @@ async function filter_coverage(coverage) {
181
281
  continue;
182
282
  }
183
283
  if (is_html(entry.text)) {
184
- let { css, ranges } = await remap_html(entry.text, entry.ranges);
284
+ let { css, ranges } = remap_html(entry.text, entry.ranges);
185
285
  result.push({
186
286
  url: entry.url,
187
287
  text: css,
@@ -189,7 +289,7 @@ async function filter_coverage(coverage) {
189
289
  });
190
290
  continue;
191
291
  }
192
- result.push({
292
+ if (is_css_like(entry.text) && !is_js_like(entry.text)) result.push({
193
293
  url: entry.url,
194
294
  text: entry.text,
195
295
  ranges: entry.ranges
@@ -198,12 +298,85 @@ async function filter_coverage(coverage) {
198
298
  return result;
199
299
  }
200
300
 
301
+ //#endregion
302
//#region src/lib/extend-ranges.ts
const AT_SIGN = 64;
const LONGEST_ATRULE_NAME = 28;
/**
 * Widen coverage ranges in place:
 * - scans backwards (at most LONGEST_ATRULE_NAME characters) for a `@`
 *   so a range that starts inside an at-rule prelude includes the `@name`,
 *   stopping at the previous range's end so ranges never overlap
 * - when the character after the range (skipping whitespace) is `{`,
 *   grows the range by one character
 * - when the character after the range (skipping whitespace) is `}`,
 *   extends the range to include that closing brace
 * Note: the range objects themselves are mutated and returned.
 * @param {Array<{url: string, text: string, ranges: Array}>} coverage
 * @returns {Array} coverage entries with extended ranges
 */
function extend_ranges(coverage) {
	// Returns the index of the first non-whitespace character at or after `position`.
	function skip_whitespace(text, position) {
		while (/\s/.test(text.charAt(position))) position++;
		return position;
	}
	return coverage.map(({ text, ranges, url }) => {
		let extended = ranges.map((range, index) => {
			let prev_range = ranges[index - 1];
			for (let i = range.start; i >= range.start - LONGEST_ATRULE_NAME; i--) {
				// Never extend backwards into the previous range
				if (prev_range && prev_range.end > i) break;
				if (text.charCodeAt(i) === AT_SIGN) {
					range.start = i;
					let after = skip_whitespace(text, range.end);
					if (text.charAt(after) === "{") range.end = range.end + 1;
					break;
				}
			}
			let after_end = skip_whitespace(text, range.end);
			if (text.charAt(after_end) === "}") range.end = after_end + 1;
			return range;
		});
		return {
			text,
			ranges: extended,
			url
		};
	});
}
339
+
201
340
  //#endregion
202
341
  //#region src/lib/index.ts
203
342
/**
 * Safe division: returns 0 when the denominator is 0.
 * @param {number} fraction
 * @param {number} total
 * @returns {number}
 */
function ratio(fraction, total) {
	return total === 0 ? 0 : fraction / total;
}
/**
 * Aggregate a single stylesheet's chunk list into byte/line coverage totals.
 * Byte size of a chunk is `end_offset - start_offset`; line size is the
 * chunk's precomputed `total_lines`.
 * @param {Object} stylesheet - { text, url, chunks }
 * @returns {Object} per-stylesheet coverage report
 */
function calculate_stylesheet_coverage({ text, url, chunks }) {
	let covered_bytes = 0;
	let uncovered_bytes = 0;
	let covered_lines = 0;
	let uncovered_lines = 0;
	for (let { is_covered, total_lines: chunk_lines, start_offset, end_offset } of chunks) {
		let byte_size = end_offset - start_offset;
		if (is_covered) {
			covered_bytes += byte_size;
			covered_lines += chunk_lines;
		} else {
			uncovered_bytes += byte_size;
			uncovered_lines += chunk_lines;
		}
	}
	let total_bytes = covered_bytes + uncovered_bytes;
	let total_lines = covered_lines + uncovered_lines;
	return {
		url,
		text,
		uncovered_bytes,
		covered_bytes,
		total_bytes,
		line_coverage_ratio: ratio(covered_lines, total_lines),
		byte_coverage_ratio: ratio(covered_bytes, total_bytes),
		total_lines,
		covered_lines,
		uncovered_lines,
		chunks
	};
}
207
380
  /**
208
381
  * @description
209
382
  * CSS Code Coverage calculation
@@ -215,93 +388,16 @@ function ratio(fraction, total) {
215
388
  * 4. Calculate used/unused CSS bytes (fastest path, no inspection of the actual CSS needed)
216
389
  * 5. Calculate line-coverage, byte-coverage per stylesheet
217
390
  */
218
- async function calculate_coverage(coverage) {
391
+ function calculate_coverage(coverage) {
219
392
  let total_files_found = coverage.length;
220
- let coverage_per_stylesheet = deduplicate_entries(prettify(await filter_coverage(coverage))).map(({ text, url, ranges }) => {
221
- function is_line_covered(line, start_offset) {
222
- let end = start_offset + line.length;
223
- let next_offset = end + 1;
224
- let is_empty = /^\s*$/.test(line);
225
- let is_closing_brace = line.endsWith("}");
226
- if (!is_empty && !is_closing_brace) for (let range of ranges) {
227
- if (range.start > end || range.end < start_offset) continue;
228
- if (range.start <= start_offset && range.end >= end) return true;
229
- else if (line.startsWith("@") && range.start > start_offset && range.start < next_offset) return true;
230
- }
231
- return false;
232
- }
233
- let lines = text.split("\n");
234
- let total_file_lines = lines.length;
235
- let line_coverage = new Uint8Array(total_file_lines);
236
- let file_lines_covered = 0;
237
- let file_total_bytes = text.length;
238
- let file_bytes_covered = 0;
239
- let offset = 0;
240
- for (let index = 0; index < lines.length; index++) {
241
- let line = lines[index];
242
- let start = offset;
243
- let next_offset = offset + line.length + 1;
244
- let is_empty = /^\s*$/.test(line);
245
- let is_closing_brace = line.endsWith("}");
246
- let is_in_range = is_line_covered(line, start);
247
- let is_covered = false;
248
- let prev_is_covered = index > 0 && line_coverage[index - 1] === 1;
249
- if (is_in_range && !is_closing_brace && !is_empty) is_covered = true;
250
- else if ((is_empty || is_closing_brace) && prev_is_covered) is_covered = true;
251
- else if (is_empty && !prev_is_covered && is_line_covered(lines[index + 1], next_offset)) is_covered = true;
252
- line_coverage[index] = is_covered ? 1 : 0;
253
- if (is_covered) {
254
- file_lines_covered++;
255
- file_bytes_covered += line.length + 1;
256
- }
257
- offset = next_offset;
258
- }
259
- let chunks = [{
260
- start_line: 1,
261
- is_covered: line_coverage[0] === 1,
262
- end_line: 1,
263
- total_lines: 1
264
- }];
265
- for (let index = 1; index < line_coverage.length; index++) {
266
- let is_covered = line_coverage.at(index);
267
- if (is_covered !== line_coverage.at(index - 1)) {
268
- let last_chunk$1 = chunks.at(-1);
269
- last_chunk$1.end_line = index;
270
- last_chunk$1.total_lines = index - last_chunk$1.start_line + 1;
271
- chunks.push({
272
- start_line: index + 1,
273
- is_covered: is_covered === 1,
274
- end_line: index,
275
- total_lines: 0
276
- });
277
- }
278
- }
279
- let last_chunk = chunks.at(-1);
280
- last_chunk.total_lines = line_coverage.length + 1 - last_chunk.start_line;
281
- last_chunk.end_line = line_coverage.length;
282
- return {
283
- url,
284
- text,
285
- ranges,
286
- unused_bytes: file_total_bytes - file_bytes_covered,
287
- used_bytes: file_bytes_covered,
288
- total_bytes: file_total_bytes,
289
- line_coverage_ratio: ratio(file_lines_covered, total_file_lines),
290
- byte_coverage_ratio: ratio(file_bytes_covered, file_total_bytes),
291
- line_coverage,
292
- total_lines: total_file_lines,
293
- covered_lines: file_lines_covered,
294
- uncovered_lines: total_file_lines - file_lines_covered,
295
- chunks
296
- };
297
- });
393
+ let coverage_per_stylesheet = extend_ranges(deduplicate_entries(filter_coverage(coverage))).map((sheet) => chunkify(sheet)).map((sheet) => prettify(sheet)).map((stylesheet) => calculate_stylesheet_coverage(stylesheet));
298
394
  let { total_lines, total_covered_lines, total_uncovered_lines, total_bytes, total_used_bytes, total_unused_bytes } = coverage_per_stylesheet.reduce((totals, sheet) => {
299
395
  totals.total_lines += sheet.total_lines;
300
396
  totals.total_covered_lines += sheet.covered_lines;
301
397
  totals.total_uncovered_lines += sheet.uncovered_lines;
302
398
  totals.total_bytes += sheet.total_bytes;
303
- totals.total_used_bytes += sheet.used_bytes;
304
- totals.total_unused_bytes += sheet.unused_bytes;
399
+ totals.total_used_bytes += sheet.covered_bytes;
400
+ totals.total_unused_bytes += sheet.uncovered_bytes;
305
401
  return totals;
306
402
  }, {
307
403
  total_lines: 0,
@@ -315,9 +411,9 @@ async function calculate_coverage(coverage) {
315
411
  total_files_found,
316
412
  total_bytes,
317
413
  total_lines,
318
- used_bytes: total_used_bytes,
414
+ covered_bytes: total_used_bytes,
319
415
  covered_lines: total_covered_lines,
320
- unused_bytes: total_unused_bytes,
416
+ uncovered_bytes: total_unused_bytes,
321
417
  uncovered_lines: total_uncovered_lines,
322
418
  byte_coverage_ratio: ratio(total_used_bytes, total_bytes),
323
419
  line_coverage_ratio: ratio(total_covered_lines, total_lines),
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@projectwallace/css-code-coverage",
3
- "version": "0.5.0",
4
- "description": "",
3
+ "version": "0.7.0",
4
+ "description": "Generate useful CSS Code Coverage report from browser-reported coverage",
5
5
  "author": "Bart Veneman <bart@projectwallace.com>",
6
6
  "repository": {
7
7
  "type": "git",
@@ -41,13 +41,14 @@
41
41
  "build": "tsdown",
42
42
  "check": "tsc --noEmit",
43
43
  "lint": "oxlint --config .oxlintrc.json",
44
- "lint-package": "publint"
44
+ "lint-package": "publint",
45
+ "knip": "knip"
45
46
  },
46
47
  "devDependencies": {
47
48
  "@playwright/test": "^1.56.0",
48
- "@types/node": "^24.8.1",
49
+ "@types/node": "^24.9.2",
49
50
  "c8": "^10.1.3",
50
- "linkedom": "^0.18.12",
51
+ "knip": "^5.66.4",
51
52
  "oxlint": "^1.22.0",
52
53
  "publint": "^0.3.14",
53
54
  "tsdown": "^0.15.8",
@@ -55,7 +56,6 @@
55
56
  },
56
57
  "dependencies": {
57
58
  "@projectwallace/format-css": "^2.1.1",
58
- "css-tree": "^3.1.0",
59
59
  "valibot": "^1.1.0"
60
60
  }
61
61
  }